Compare commits
No commits in common. "main" and "main" have entirely different histories.
Makefile (12 changed lines)
@@ -1,13 +1,7 @@
 all:
-	isort *.py lib/*py tests/*py
-	autoflake --in-place --remove-unused-variables *.py lib/*py tests/*py
-	black *.py lib/*py tests/*py
+	isort -rc .
+	autoflake -r --in-place --remove-unused-variables .
+	black .

 test:
 	python3 -m unittest -v tests/*.py
-
-update-packages:
-	f=$$(mktemp) ;\
-	osc api /source/openSUSE:Factory?view=info | grep -v lsrcmd5 | grep srcmd5= | sed -e 's,.*package=",,; s,".*,,' | grep -v : > $$f ;\
-	echo _project >> $$f ;\
-	mv $$f packages
@@ -1,4 +1,4 @@
-sudo zypper in python3-psycopg
+sudo zypper in python3-psycopg2
 sudo su - postgres
 # `createdb -O <LOCAL_USER> imported_git`

@@ -42,36 +42,16 @@ PROJECTS = [
 ]


-def export_package(project, package, repodir, cachedir, gc):
-    exporter = GitExporter(URL_OBS, project, package, repodir, cachedir)
-    exporter.set_gc_interval(gc)
-    exporter.export_as_git()
-
-
 def main():
     parser = argparse.ArgumentParser(description="OBS history importer into git")
-    parser.add_argument("packages", help="OBS package names", nargs="*")
-    parser.add_argument(
-        "-p",
-        "--project",
-        default="openSUSE:Factory",
-        help="Project to import/export, default is openSUSE:Factory",
-    )
+    parser.add_argument("package", help="OBS package name")
     parser.add_argument(
         "-r",
         "--repodir",
         required=False,
-        default=pathlib.Path("repos"),
         type=pathlib.Path,
         help="Local git repository directory",
     )
-    parser.add_argument(
-        "-c",
-        "--cachedir",
-        required=False,
-        type=pathlib.Path,
-        help="Local cache directory",
-    )
     parser.add_argument(
         "-g",
         "--gc",
@@ -107,22 +87,17 @@ def main():
     requests_log.propagate = True

     if args.export:
-        if len(args.packages) != 1:
-            print("Can only export one package")
-            sys.exit(1)
-        TestExporter(args.packages[0]).run()
+        TestExporter(args.package).run()
         return

-    if not args.cachedir:
-        args.cachedir = pathlib.Path("~/.cache/git-import/").expanduser()
+    if not args.repodir:
+        args.repodir = pathlib.Path("repos/" + args.package)

-    importer = Importer(URL_OBS, args.project, args.packages)
+    importer = Importer(URL_OBS, "openSUSE:Factory", args.package)
     importer.import_into_db()
-    for package in args.packages:
-        if not importer.package_with_scmsync(package):
-            export_package(args.project, package, args.repodir, args.cachedir, args.gc)
-        else:
-            logging.debug(f"{args.project}/{package} has scmsync links - skipping export")
+    exporter = GitExporter(URL_OBS, "openSUSE:Factory", args.package, args.repodir)
+    exporter.set_gc_interval(args.gc)
+    exporter.export_as_git()


 if __name__ == "__main__":
gone-packages.txt (1355 changed lines)
(File diff suppressed because it is too large.)
@@ -1,10 +1,6 @@
-from abc import ABC, abstractmethod
-
-
-class AbstractWalker(ABC):
+class AbstractWalker:
     """Just a duck type, most likely not needed by python, but I
     find interface classes preferable (java school)"""

-    @abstractmethod
     def call(self, node, is_source):
         pass
@@ -25,28 +25,18 @@ BINARY = {
     ".zst",
 }

-TEXT_MIMETYPES = {
-    "message/rfc822",
-    "application/pgp-keys",
-    "application/x-gnupg-keyring",
-}
-
-
-def is_text_mimetype(mimetype):
-    if mimetype.startswith("text/"):
-        return True
-
-    return mimetype.split(";")[0] in TEXT_MIMETYPES
-

 def is_binary_or_large(filename, size):
     """Decide if is a binary file based on the extension or size"""
     binary_suffix = BINARY
     non_binary_suffix = {
+        ".1",
+        ".8",
         ".SUSE",
         ".asc",
         ".c",
         ".cabal",
+        ".cfg",
         ".changes",
         ".conf",
         ".desktop",
@@ -14,6 +14,8 @@ def config(filename="database.ini", section="production"):
         for param in params:
             db[param[0]] = param[1]
     else:
-        raise Exception(f"Section {section} not found in the {filename} file")
+        raise Exception(
+            "Section {0} not found in the {1} file".format(section, filename)
+        )

     return db
lib/db.py (60 changed lines)
@@ -1,6 +1,7 @@
 import logging

-import psycopg
+import psycopg2
+from psycopg2.extras import LoggingConnection

 from lib.config import config

@@ -16,20 +17,22 @@ class DB:
             # read the connection parameters
             params = config(section=self.config_section)
             # connect to the PostgreSQL server
-            self.conn = psycopg.connect(conninfo=f"dbname={params['database']}")
-            logging.getLogger("psycopg.pool").setLevel(logging.INFO)
-        except (Exception, psycopg.DatabaseError) as error:
+            self.conn = psycopg2.connect(connection_factory=LoggingConnection, **params)
+            logger = logging.getLogger(__name__)
+            self.conn.initialize(logger)
+
+        except (Exception, psycopg2.DatabaseError) as error:
             print(error)
             raise error

     def schema_version(self):
         # create a cursor
         with self.conn.cursor() as cur:

             # execute a statement
             try:
                 cur.execute("SELECT MAX(version) from scheme")
-            except psycopg.errors.UndefinedTable:
+            except psycopg2.errors.UndefinedTable as error:
                 cur.close()
                 self.close()
                 self.connect()
@@ -212,51 +215,6 @@ class DB:
             "CREATE INDEX ON linked_revs(considered)",
             "UPDATE scheme SET version=20",
         )
-        schemes[21] = (
-            "ALTER TABLE revisions ADD COLUMN api_url VARCHAR(40)",
-            "UPDATE revisions SET api_url='https://api.opensuse.org'",
-            "ALTER TABLE revisions ALTER COLUMN api_url SET NOT NULL",
-            "UPDATE scheme SET version=21",
-        )
-        schemes[22] = (
-            """DROP TABLE IF EXISTS lfs_oids""",
-            """
-            CREATE TABLE lfs_oids (
-                id SERIAL PRIMARY KEY,
-                project VARCHAR(255) NOT NULL,
-                package VARCHAR(255) NOT NULL,
-                filename VARCHAR(255) NOT NULL,
-                rev VARCHAR(40) NOT NULL,
-                sha256 VARCHAR(70) NOT NULL,
-                size INTEGER NOT NULL,
-                mimetype VARCHAR(255) NOT NULL,
-                file_md5 VARCHAR(40) NOT NULL
-            )
-            """,
-            "CREATE UNIQUE INDEX ON lfs_oids (sha256,size)",
-            "CREATE INDEX ON revisions(package)",
-            """DROP TABLE IF EXISTS text_files""",
-            """
-            CREATE TABLE text_files (
-                id SERIAL PRIMARY KEY,
-                package VARCHAR(255) NOT NULL,
-                filename VARCHAR(255) NOT NULL
-            )
-            """,
-            "CREATE UNIQUE INDEX ON text_files (package,filename)",
-            """DROP TABLE IF EXISTS lfs_oid_in_package""",
-            """
-            CREATE TABLE lfs_oid_in_package (
-                id SERIAL PRIMARY KEY,
-                lfs_oid_id INTEGER NOT NULL,
-                package VARCHAR(255) NOT NULL,
-                filename VARCHAR(255) NOT NULL
-            )
-            """,
-            "CREATE INDEX ON text_files(package)",
-            "CREATE INDEX ON lfs_oid_in_package(package)",
-            "UPDATE scheme SET version=22",
-        )
         schema_version = self.schema_version()
         if (schema_version + 1) not in schemes:
             return
@@ -270,7 +228,7 @@ class DB:
                     cur.execute(command)
             # commit the changes
             self.conn.commit()
-        except (Exception, psycopg.DatabaseError) as error:
+        except (Exception, psycopg2.DatabaseError) as error:
             print(error)
             self.close()
             raise error
@@ -1,15 +1,15 @@
 from __future__ import annotations

+import logging
 from hashlib import md5
-from pathlib import Path
+from typing import Optional

 from lib.db import DB
-from lib.obs_revision import OBSRevision
 from lib.request import Request


 class DBRevision:
-    def __init__(self, db: DB, row: tuple):
+    def __init__(self, row):
         # need to stay in sync with the schema creation in db.py
         (
             self.dbid,
@@ -25,12 +25,9 @@ class DBRevision:
             self.request_number,
             self.request_id,
             self.files_hash,
-            self.api_url,
         ) = row
         self.rev = float(self.rev)
         self._files = None
-        self.db = db
-        self.git_commit = None

     def short_string(self):
         return f"{self.project}/{self.package}/{self.rev}"
@@ -51,29 +48,7 @@ class DBRevision:
             return self.package < other.package
         return self.rev < other.rev

-    def request_accept_message(self):
-        request = Request.find(self.db, self.request_id)
-        msg = f"Accepting request {request.number} from {request.source_project}\n\n"
-        msg += self.comment.strip()
-        url = self.api_url.replace("api.", "build.")
-        msg += f"\n\nOBS-URL: {url}/request/show/{self.request_number}"
-        return msg
-
-    def git_commit_message(self):
-        msg = ""
-        if self.request_id:
-            msg = self.request_accept_message()
-        else:
-            msg = self.comment.strip() + "\n"
-        url = self.api_url.replace("api.", "build.")
-        if self.rev == int(self.rev):
-            # do not link to fake revisions
-            msg += f"\nOBS-URL: {url}/package/show/{self.project}/{self.package}?expand=0&rev={int(self.rev)}"
-        else:
-            msg += f"\nOBS-URL: {url}/package/show/{self.project}/{self.package}?expand=0&rev={self.expanded_srcmd5}"
-        return msg
-
-    def as_dict(self):
+    def as_dict(self, db):
         """Return a dict we can put into YAML for test cases"""
         ret = {
             "project": self.project,
@@ -85,27 +60,26 @@ class DBRevision:
             "comment": self.comment,
             "broken": self.broken,
             "expanded_srcmd5": self.expanded_srcmd5,
-            "api_url": self.api_url,
             "files_hash": self.files_hash,
-            "files": self.files_list(),
+            "files": self.files_list(db),
         }
         if self.request_id:
-            ret["request"] = Request.find(self.db, self.request_id).as_dict()
+            ret["request"] = Request.find(db, self.request_id).as_dict()
         return ret

-    def links_to(self, project: str, package: str) -> None:
-        with self.db.cursor() as cur:
+    def links_to(self, db, project, package):
+        with db.cursor() as cur:
             cur.execute(
                 "INSERT INTO links (revision_id, project, package) VALUES (%s,%s,%s)",
                 (self.dbid, project, package),
             )

-    @staticmethod
-    def import_obs_rev(db: DB, revision: OBSRevision):
+    @classmethod
+    def import_obs_rev(cls, db, revision):
         with db.cursor() as cur:
             cur.execute(
-                """INSERT INTO revisions (project, package, rev, unexpanded_srcmd5, commit_time, userid, comment, request_number, api_url)
-                VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s)""",
+                """INSERT INTO revisions (project, package, rev, unexpanded_srcmd5, commit_time, userid, comment, request_number)
+                VALUES(%s, %s, %s, %s, %s, %s, %s, %s)""",
                 (
                     revision.project,
                     revision.package,
@@ -115,17 +89,12 @@ class DBRevision:
                     revision.userid,
                     revision.comment,
                     revision.request_number,
-                    revision.obs.url,
                 ),
             )
-        return DBRevision.fetch_revision(
-            db, revision.project, revision.package, revision.rev
-        )
+        return cls.fetch_revision(db, revision.project, revision.package, revision.rev)

     @staticmethod
     def fetch_revision(db, project, package, rev):
-        """Technically we would need the api_url as well, but we assume projects are unique
-        (e.g. not importing SLE from obs)"""
         with db.cursor() as cur:
             cur.execute(
                 "SELECT * FROM revisions where project=%s and package=%s and rev=%s",
@@ -133,21 +102,16 @@ class DBRevision:
             )
             row = cur.fetchone()
             if row:
-                return DBRevision(db, row)
+                return DBRevision(row)

     @staticmethod
-    def max_rev(db, project, package):
+    def latest_revision(db, project, package):
         with db.cursor() as cur:
             cur.execute(
                 "SELECT MAX(rev) FROM revisions where project=%s and package=%s",
                 (project, package),
             )
-            return cur.fetchone()[0]
-        return None
-
-    @staticmethod
-    def latest_revision(db, project, package):
-        max = DBRevision.max_rev(db, project, package)
+            max = cur.fetchone()[0]
             if max:
                 return DBRevision.fetch_revision(db, project, package, max)
         return None
@@ -161,13 +125,13 @@ class DBRevision:
             )
             ret = []
             for row in cur.fetchall():
-                ret.append(DBRevision(db, row))
+                ret.append(DBRevision(row))
             return ret

-    def linked_rev(self):
+    def linked_rev(self, db):
         if self.broken:
             return None
-        with self.db.cursor() as cur:
+        with db.cursor() as cur:
             cur.execute(
                 "SELECT project,package FROM links where revision_id=%s", (self.dbid,)
             )
@@ -179,31 +143,24 @@ class DBRevision:
                 "SELECT * FROM revisions where project=%s and package=%s and commit_time <= %s ORDER BY commit_time DESC LIMIT 1",
                 (project, package, self.commit_time),
             )
-            revisions = [DBRevision(self.db, row) for row in cur.fetchall()]
+            revisions = [DBRevision(row) for row in cur.fetchall()]
             if revisions:
                 return revisions[0]
             else:
-                self.set_broken()
+                self.set_broken(db)
                 return None

-    def set_broken(self):
-        with self.db.cursor() as cur:
+    def set_broken(self, db):
+        with db.cursor() as cur:
             cur.execute("UPDATE revisions SET broken=TRUE where id=%s", (self.dbid,))

-    def import_dir_list(self, xml):
-        with self.db.cursor() as cur:
+    def import_dir_list(self, db, xml):
+        with db.cursor() as cur:
             cur.execute(
                 "UPDATE revisions SET expanded_srcmd5=%s where id=%s",
                 (xml.get("srcmd5"), self.dbid),
             )
             for entry in xml.findall("entry"):
-                # this file creates easily 100k commits and is just useless data :(
-                # unfortunately it's stored in the same meta package as the project config
-                if (
-                    entry.get("name") == "_staging_workflow"
-                    and self.package == "_project"
-                ):
-                    continue
                 cur.execute(
                     """INSERT INTO files (name, md5, size, mtime, revision_id)
                     VALUES (%s,%s,%s,%s,%s)""",
@@ -216,19 +173,15 @@ class DBRevision:
                     ),
                 )

-    def previous_commit(self):
-        return DBRevision.fetch_revision(
-            self.db, self.project, self.package, int(self.rev) - 1
-        )
+    def previous_commit(self, db):
+        return self.fetch_revision(db, self.project, self.package, int(self.rev) - 1)

-    def next_commit(self):
-        return DBRevision.fetch_revision(
-            self.db, self.project, self.package, int(self.rev) + 1
-        )
+    def next_commit(self, db):
+        return self.fetch_revision(db, self.project, self.package, int(self.rev) + 1)

-    def calculate_files_hash(self):
+    def calculate_files_hash(self, db):
         m = md5()
-        for file_dict in self.files_list():
+        for file_dict in self.files_list(db):
             m.update(
                 (
                     file_dict["name"]
@@ -240,10 +193,10 @@ class DBRevision:
             )
         return m.hexdigest()

-    def files_list(self):
+    def files_list(self, db):
         if self._files:
             return self._files
-        with self.db.cursor() as cur:
+        with db.cursor() as cur:
             cur.execute("SELECT * from files where revision_id=%s", (self.dbid,))
             self._files = []
             for row in cur.fetchall():
@@ -254,23 +207,27 @@ class DBRevision:
             self._files.sort(key=lambda x: x["name"])
             return self._files

-    def calc_delta(self, current_rev: DBRevision | None):
+    def calc_delta(self, db: DB, current_rev: Optional[DBRevision]):
         """Calculate the list of files to download and to delete.
         Param current_rev is the revision that's currently checked out.
         If it's None, the repository is empty.
         """
         to_download = []
+        to_delete = []
         if current_rev:
             old_files = {
-                e["name"]: f"{e['md5']}-{e['size']}" for e in current_rev.files_list()
+                e["name"]: f"{e['md5']}-{e['size']}" for e in current_rev.files_list(db)
             }
         else:
             old_files = dict()
-        for entry in self.files_list():
+        for entry in self.files_list(db):
             if old_files.get(entry["name"]) != f"{entry['md5']}-{entry['size']}":
-                to_download.append((Path(entry["name"]), entry["size"], entry["md5"]))
+                logging.debug(f"Download {entry['name']}")
+                to_download.append((entry["name"], entry["md5"]))
             old_files.pop(entry["name"], None)
-        to_delete = [Path(e) for e in old_files.keys()]
+        for entry in old_files.keys():
+            logging.debug(f"Delete {entry}")
+            to_delete.append(entry)
         return to_download, to_delete

     @staticmethod
@@ -289,8 +246,8 @@ class DBRevision:
         with db.cursor() as cur:
             cur.execute(
                 """INSERT INTO revisions (project, package, rev, unexpanded_srcmd5, expanded_srcmd5,
-                commit_time, userid, comment, broken, files_hash, api_url)
-                VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s) RETURNING id""",
+                commit_time, userid, comment, broken, files_hash)
+                VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s) RETURNING id""",
                 (
                     rev_dict["project"],
                     rev_dict["package"],
@@ -302,7 +259,6 @@ class DBRevision:
                     rev_dict["comment"],
                     rev_dict["broken"],
                     rev_dict["files_hash"],
-                    rev_dict.get("api_url", "https://api.opensuse.org"),
                 ),
             )
             rev_id = cur.fetchone()[0]
@@ -9,8 +9,12 @@ class FlatNode:
         self.parent2 = parent2

     def __str__(self) -> str:
-        p1_str = f" p1:{self.parent1.short_string()}" if self.parent1 else ""
-        p2_str = f" p2:{self.parent2.short_string()}" if self.parent2 else ""
+        p1_str = ""
+        if self.parent1:
+            p1_str = f" p1:{self.parent1.short_string()}"
+        p2_str = ""
+        if self.parent2:
+            p2_str = f" p2:{self.parent2.short_string()}"
         return f"{self.branch} c:{self.commit.short_string()}{p1_str}{p2_str}"


@@ -32,7 +36,8 @@ class FlatTreeWalker(AbstractWalker):
     def handle_source_node(self, node) -> None:
         if self.rebase_devel and node.parent and node.parent.merged_into:
             self.add("devel", node.revision, node.parent.merged_into.revision)
-        elif node.parent:
+            return
+        if node.parent:
             self.add("devel", node.revision, node.parent.revision)
         elif self.last_merge:
             self.add("devel", node.revision, self.last_merge.parent.revision)
lib/git.py (294 changed lines)
@@ -1,10 +1,9 @@
 import fnmatch
 import logging
-import os
 import pathlib
 import subprocess

-import requests
+import pygit2

 from lib.binary import BINARY

@@ -19,6 +18,12 @@ class Git:
         self.committer = committer
         self.committer_email = committer_email

+        self.repo = None
+
+    def is_open(self):
+        return self.repo is not None
+
+    # TODO: Extend it to packages and files
     def exists(self):
         """Check if the path is a valid git repository"""
         return (self.path / ".git").exists()
@@ -26,72 +31,36 @@ class Git:
     def create(self):
         """Create a local git repository"""
         self.path.mkdir(parents=True, exist_ok=True)
-        self.open()
-
-    def git_run(self, args, **kwargs):
-        """Run a git command"""
-        if "env" in kwargs:
-            envs = kwargs["env"].copy()
-            del kwargs["env"]
-        else:
-            envs = os.environ.copy()
-        envs["GIT_LFS_SKIP_SMUDGE"] = "1"
-        envs["GIT_CONFIG_GLOBAL"] = "/dev/null"
-        return subprocess.run(
-            ["git"] + args,
-            cwd=self.path,
-            check=True,
-            env=envs,
-            **kwargs,
-        )
-
-    def open(self):
-        if not self.exists():
-            self.git_run(["init", "--object-format=sha256", "-b", "factory"])
-            self.git_run(["config", "lfs.allowincompletepush", "true"])
+        # Convert the path to string, to avoid some limitations in
+        # older pygit2
+        self.repo = pygit2.init_repository(str(self.path))
+        return self

     def is_dirty(self):
         """Check if there is something to commit"""
-        status_str = self.git_run(
-            ["status", "--porcelain=2"],
-            stdout=subprocess.PIPE,
-        ).stdout.decode("utf-8")
-        return len(list(filter(None, status_str.split("\n")))) > 0
+        assert self.is_open()
+        return self.repo.status()

     def branches(self):
-        br = (
-            self.git_run(
-                ["for-each-ref", "--format=%(refname:short)", "refs/heads/"],
-                stdout=subprocess.PIPE,
-            )
-            .stdout.decode("utf-8")
-            .split()
-        )
-        if len(br) == 0:
-            br.append("factory")  # unborn branch?
-        return br
+        return list(self.repo.branches)

-    def branch(self, branch, commit="HEAD"):
-        commit = (
-            self.git_run(
-                ["rev-parse", "--verify", "--end-of-options", commit + "^{commit}"],
-                stdout=subprocess.PIPE,
-            )
-            .stdout.decode("utf-8")
-            .strip()
-        )
-        return self.git_run(["branch", branch, commit])
+    def branch(self, branch, commit=None):
+        if not commit:
+            commit = self.repo.head
+        else:
+            commit = self.repo.get(commit)
+        self.repo.branches.local.create(branch, commit)

     def checkout(self, branch):
         """Checkout into the branch HEAD"""
         new_branch = False
+        ref = f"refs/heads/{branch}"
         if branch not in self.branches():
-            self.git_run(["switch", "-q", "--orphan", branch])
+            self.repo.references["HEAD"].set_target(ref)
             new_branch = True
         else:
-            ref = f"refs/heads/{branch}"
-            if (self.path / ".git" / ref).exists():
-                self.git_run(["switch", "--no-guess", "-q", branch])
+            self.repo.checkout(ref)
         return new_branch

     def commit(
@@ -104,8 +73,10 @@ class Git:
         committer=None,
         committer_email=None,
         committer_time=None,
+        allow_empty=False,
     ):
         """Add all the files and create a new commit in the current HEAD"""
+        assert allow_empty or self.is_dirty()

         if not committer:
             committer = self.committer if self.committer else self.user
@@ -114,80 +85,125 @@ class Git:
         )
         committer_time = committer_time if committer_time else user_time

-        if self.is_dirty():
-            self.git_run(["add", "--all", "."])
+        try:
+            self.repo.index.add_all()
+        except pygit2.GitError as e:
+            if not allow_empty:
+                raise e

-        tree_id = (
-            self.git_run(["write-tree"], stdout=subprocess.PIPE)
-            .stdout.decode("utf-8")
-            .strip()
+        self.repo.index.write()
+        author = pygit2.Signature(user, user_email, int(user_time.timestamp()))
+        committer = pygit2.Signature(
+            committer, committer_email, int(committer_time.timestamp())
+        )
+        if not parents:
+            try:
+                parents = [self.repo.head.target]
+            except pygit2.GitError as e:
+                parents = []
+                if not allow_empty:
+                    raise e
+
+        tree = self.repo.index.write_tree()
+        return self.repo.create_commit(
+            "HEAD", author, committer, message, tree, parents
         )

-        parent_array = []
-        if isinstance(parents, list):
-            for parent in filter(None, parents):
-                parent_array = parent_array + ["-p", parent]
-        elif isinstance(parents, str):
-            parent_array = ["-p", parents]
-
-        commit_id = (
-            self.git_run(
-                ["commit-tree"] + parent_array + [tree_id],
-                env={
-                    "GIT_AUTHOR_NAME": user,
-                    "GIT_AUTHOR_EMAIL": user_email,
-                    "GIT_AUTHOR_DATE": f"{int(user_time.timestamp())} +0000",
-                    "GIT_COMMITTER_NAME": committer,
-                    "GIT_COMMITTER_EMAIL": committer_email,
-                    "GIT_COMMITTER_DATE": f"{int(committer_time.timestamp())} +0000",
-                },
-                input=message.encode("utf-8"),
-                stdout=subprocess.PIPE,
-            )
-            .stdout.decode("utf-8")
-            .rstrip()
-        )
-        self.git_run(["reset", "--soft", commit_id])
-        return commit_id
-
-    def branch_head(self, branch="HEAD"):
-        return (
-            self.git_run(
-                ["rev-parse", "--verify", "--end-of-options", branch],
-                stdout=subprocess.PIPE,
-            )
-            .stdout.decode("utf-8")
-            .strip()
+    def merge(
+        self,
+        user,
+        user_email,
+        user_time,
+        message,
+        commit,
+        committer=None,
+        committer_email=None,
+        committer_time=None,
+        clean_on_conflict=True,
+        merged=False,
+        allow_empty=False,
+    ):
+        new_branch = False
+
+        if not merged:
+            try:
+                self.repo.merge(commit)
+            except KeyError:
+                # If it is the first commit, we will have a missing
+                # "HEAD", but the files will be there. We can proceed
+                # to the commit directly.
+                new_branch = True
+
+        if not merged and self.repo.index.conflicts:
+            for conflict in self.repo.index.conflicts:
+                conflict = [c for c in conflict if c]
+                if conflict:
+                    logging.info(f"CONFLICT {conflict[0].path}")
+
+            if clean_on_conflict:
+                self.clean()
+            # Now I miss Rust enums
+            return "CONFLICT"
+
+        # Some merges are empty in OBS (no changes, not sure
+        # why), for now we signal them
+        if not allow_empty and not self.is_dirty():
+            # I really really do miss Rust enums
+            return "EMPTY"
+
+        if new_branch:
+            parents = [commit]
+        else:
+            parents = [
+                self.repo.head.target,
+                commit,
+            ]
+        commit = self.commit(
+            user,
+            user_email,
+            user_time,
+            message,
+            parents,
+            committer,
+            committer_email,
+            committer_time,
+            allow_empty=allow_empty,
         )

-    def set_branch_head(self, branch, commit):
-        return self.git_run(["update-ref", f"refs/heads/{branch}", commit])
+        return commit
+
+    def merge_abort(self):
+        self.repo.state_cleanup()
+
+    def last_commit(self):
+        try:
+            return self.repo.head.target
+        except:
+            return None
+
+    def branch_head(self, branch):
+        return self.repo.references["refs/heads/" + branch].target

     def gc(self):
-        logging.debug(f"Garbage recollect and repackage {self.path}")
-        self.git_run(
-            ["gc", "--auto"],
+        logging.info(f"Garbage recollect and repackage {self.path}")
+        subprocess.run(
+            ["git", "gc", "--auto"],
+            cwd=self.path,
             stdout=subprocess.PIPE,
             stderr=subprocess.STDOUT,
         )

-    # def clean(self):
-    #     for path, _ in self.repo.status().items():
-    #         logging.debug(f"Cleaning {path}")
-    #         try:
-    #             (self.path / path).unlink()
-    #             self.repo.index.remove(path)
-    #         except Exception as e:
-    #             logging.warning(f"Error removing file {path}: {e}")
+    def clean(self):
+        for path, _ in self.repo.status().items():
+            logging.debug(f"Cleaning {path}")
+            try:
+                (self.path / path).unlink()
+                self.repo.index.remove(path)
+            except Exception as e:
+                logging.warning(f"Error removing file {path}: {e}")

     def add(self, filename):
-        self.git_run(["add", ":(literal)" + str(filename)])
+        self.repo.index.add(filename)

-    def add_default_gitignore(self):
-        if not (self.path / ".gitignore").exists():
-            with (self.path / ".gitignore").open("w") as f:
-                f.write(".osc\n")
-            self.add(".gitignore")
-
     def add_default_lfs_gitattributes(self, force=False):
         if not (self.path / ".gitattributes").exists() or force:
@@ -240,49 +256,11 @@ class Git:
         )
         return any(fnmatch.fnmatch(filename, line) for line in patterns)

-    def remove(self, file: pathlib.Path):
-        self.git_run(
-            ["rm", "-q", "-f", "--ignore-unmatch", ":(literal)" + file.name],
-        )
+    def remove(self, filename):
+        self.repo.index.remove(filename)
+        (self.path / filename).unlink()
         patterns = self.get_specific_lfs_gitattributes()
-        if file.name in patterns:
-            patterns.remove(file.name)
+        if filename in patterns:
+            patterns.remove(filename)
             self.add_specific_lfs_gitattributes(patterns)

-    def add_gitea_remote(self, package):
-        repo_name = package.replace("+", "_")
-        org_name = "pool"
-
-        if not os.getenv("GITEA_TOKEN"):
-            logging.warning("Not adding a remote due to missing $GITEA_TOKEN")
-            return
-
-        url = f"https://src.opensuse.org/api/v1/org/{org_name}/repos"
-        response = requests.post(
-            url,
-            data={"name": repo_name, "object_format_name": "sha256"},
-            headers={"Authorization": f"token {os.getenv('GITEA_TOKEN')}"},
-            timeout=10,
-        )
-        # 409 Conflict (Already existing)
-        # 201 Created
-        if response.status_code not in (201, 409):
-            print(response.data)
-        url = f"gitea@src.opensuse.org:{org_name}/{repo_name}.git"
-        self.git_run(
-            ["remote", "add", "origin", url],
-        )
-
-    def push(self, force=False):
-        if "origin" not in self.git_run(
-            ["remote"],
-            stdout=subprocess.PIPE,
-        ).stdout.decode("utf-8"):
-            logging.warning("Not pushing to remote because no 'origin' configured")
-            return
-
-        cmd = ["push"]
-        if force:
-            cmd.append("-f")
-        cmd += ["origin", "--all"]
-        self.git_run(cmd)
@@ -6,48 +6,99 @@ import yaml
 from lib.binary import is_binary_or_large
 from lib.db import DB
 from lib.git import Git
-from lib.lfs_oid import LFSOid
 from lib.obs import OBS
-from lib.proxy_sha256 import ProxySHA256
+from lib.proxy_sha256 import ProxySHA256, md5
 from lib.tree_builder import TreeBuilder
-from lib.user import User


 class GitExporter:
-    def __init__(self, api_url, project, package, repodir, cachedir):
-        self.obs = OBS(api_url)
+    def __init__(self, api_url, project, package, repodir):
+        self.obs = OBS()
         self.project = project
         self.package = package
-        self.db = DB()
-        self.proxy_sha256 = ProxySHA256(self.obs, self.db)
+        # TODO: Store the api url in the revision
+        self.obs.change_url(api_url)
+        self.proxy_sha256 = ProxySHA256(self.obs, enabled=True)
         self.git = Git(
-            repodir / package,
+            repodir,
             committer="Git OBS Bridge",
             committer_email="obsbridge@suse.de",
-        )
-        if self.git.exists():
-            self.git.open()
-        else:
-            self.git.create()
-            self.git.add_gitea_remote(package)
+        ).create()
         self.state_file = os.path.join(self.git.path, ".git", "_flat_state.yaml")
         self.gc_interval = 200
-        self.cachedir = cachedir
+
+    def download(self, revision):
+        obs_files = self.obs.files(revision.project, revision.package, revision.srcmd5)
+        git_files = {
+            (f.name, f.stat().st_size, md5(f))
+            for f in self.git.path.iterdir()
+            if f.is_file() and f.name not in (".gitattributes")
+        }
+
+        # Overwrite ".gitattributes" with the
+        self.git.add_default_lfs_gitattributes(force=True)
+
+        # Download each file in OBS if it is not a binary (or large)
+        # file
+        for (name, size, file_md5) in obs_files:
+            # this file creates easily 100k commits and is just useless data :(
+            # unfortunately it's stored in the same meta package as the project config
+            if revision.package == "_project" and name == "_staging_workflow":
+                continue
+            # have such files been detected as text mimetype before?
+            is_text = self.proxy_sha256.is_text(name)
+            if not is_text and is_binary_or_large(name, size):
+                file_sha256 = self.proxy_sha256.get_or_put(
+                    revision.project,
+                    revision.package,
+                    name,
+                    revision.srcmd5,
+                    file_md5,
+                    size,
+                )
+                self.git.add_lfs(name, file_sha256["sha256"], size)
+            else:
+                if (name, size, file_md5) not in git_files:
+                    logging.debug(f"Download {name}")
+                    self.obs.download(
+                        revision.project,
+                        revision.package,
+                        name,
+                        revision.srcmd5,
+                        self.git.path,
+                        file_md5=file_md5,
+                    )
+                    # Validate the MD5 of the downloaded file
+                    if md5(self.git.path / name) != file_md5:
+                        raise Exception(f"Download error in {name}")
+                self.git.add(name)
+
+        # Remove extra files
+        obs_names = {n for (n, _, _) in obs_files}
+        git_names = {n for (n, _, _) in git_files}
+        for name in git_names - obs_names:
+            logging.debug(f"Remove {name}")
+            self.git.remove(name)

     def set_gc_interval(self, gc):
         self.gc_interval = gc

-    def check_repo_state(self, flats, branch_state):
+    def export_as_git(self):
+        db = DB()
+        tree = TreeBuilder(db).build(self.project, self.package)
+        flats = tree.as_flat_list()
+
+        branch_state = {"factory": None, "devel": None}
         state_data = dict()
         if os.path.exists(self.state_file):
-            with open(self.state_file) as f:
+            with open(self.state_file, "r") as f:
                 state_data = yaml.safe_load(f)
-            if not isinstance(state_data, dict):
+            if type(state_data) != dict:
                 state_data = {}
         left_to_commit = []
         for flat in reversed(flats):
             found_state = False
-            for branch in ["factory"]:
+            for branch in ["factory", "devel"]:
                 if flat.commit.dbid == state_data.get(branch):
                     branch_state[branch] = flat.commit
                     flat.commit.git_commit = self.git.branch_head(branch)
@@ -58,116 +109,55 @@ class GitExporter:
                     found_state = True
             if not found_state:
                 left_to_commit.append(flat)
-        return left_to_commit
-
-    def export_as_git(self):
-        if os.getenv("CHECK_ALL_LFS"):
-            LFSOid.check_all(self.db, self.package)
-        tree = TreeBuilder(self.db).build(self.project, self.package)
-        flats = tree.as_flat_list()
-
-        branch_state = {"factory": None, "devel": None}
-        left_to_commit = self.check_repo_state(flats, branch_state)
-
-        if not left_to_commit:
-            return
-
-        logging.info(f"Commiting into {self.git.path}")
-        self.run_gc()
-        users = dict()
-
-        for flat in left_to_commit:
-            if flat.commit.userid not in users:
-                users[flat.commit.userid] = User.find(self.db, flat.commit.userid)
-            flat.user = users[flat.commit.userid]
-            self.gc_cnt -= 1
-            if self.gc_cnt <= 0 and self.gc_interval:
-                self.run_gc()
-            logging.debug(f"Committing {flat}")
-            self.commit_flat(flat, branch_state)
-
-        # make sure that we create devel branch
-        if not branch_state["devel"]:
-            logging.debug("force creating devel")
-            self.git.set_branch_head("devel", self.git.branch_head("factory"))
-
-        self.git.push(force=True)
-
-    def run_gc(self):
-        self.gc_cnt = self.gc_interval
-        self.git.gc()
+        gc_cnt = self.gc_interval
+        if len(left_to_commit) > 0:
+            self.git.gc()
+        for flat in left_to_commit:
+            gc_cnt -= 1
+            if gc_cnt <= 0 and self.gc_interval:
+                self.git.gc()
+                gc_cnt = self.gc_interval
+            logging.debug(f"Committing {flat}")
+            self.commit_flat(db, flat, branch_state)

-    def is_lfs_file(self, package, filename, size):
-        if not is_binary_or_large(filename, size):
-            return False
-        return not self.proxy_sha256.is_text(package, filename)
+    def limit_download(self, file):
+        if file.endswith(".spec") or file.endswith(".changes"):
+            return True
+        return False

-    def commit_file(self, flat, file, size, md5):
-        # have such files been detected as text mimetype before?
-        if self.is_lfs_file(flat.commit.package, file.name, size):
-            file_sha256 = self.proxy_sha256.get_or_put(
-                flat.commit.project,
-                flat.commit.package,
-                file.name,
-                flat.commit.expanded_srcmd5,
-                md5,
-                size,
-            )
-            # as it's newly registered, it might be a text file now, so double check
-            if not self.proxy_sha256.is_text(flat.commit.package, file.name):
-                self.git.add_lfs(file.name, file_sha256, size)
-                return
-        self.commit_non_lfs_file(flat, file, md5)
-
-    def commit_non_lfs_file(self, flat, file, md5):
-        self.obs.change_url(flat.commit.api_url)
+    def commit_flat(self, db, flat, branch_state):
+        parents = []
+        self.git.checkout(flat.branch)
+        if flat.parent1:
+            parents.append(flat.parent1.git_commit)
+        if flat.parent2:
+            parents.append(flat.parent2.git_commit)
+        to_download, to_delete = flat.commit.calc_delta(db, branch_state[flat.branch])
+        for file in to_delete:
+            if not self.limit_download(file):
+                continue
+            self.git.remove(file)
+        for file, md5 in to_download:
+            if not self.limit_download(file):
+                continue
             self.obs.download(
                 flat.commit.project,
                 flat.commit.package,
-                file.name,
+                file,
                 flat.commit.expanded_srcmd5,
                 self.git.path,
-                self.cachedir,
                 file_md5=md5,
             )
             self.git.add(file)

-    def branch_fits_parent1(self, flat, branch_state):
-        if branch_state[flat.branch] is None:
-            # everything fits nothing
-            return True
-        return flat.parent1 == branch_state[flat.branch]
-
-    def commit_flat(self, flat, branch_state):
-        parents = []
-        self.git.checkout(flat.branch)
-
-        if flat.parent1:
-            if not self.branch_fits_parent1(flat, branch_state):
-                logging.debug(f"Reset {flat.branch} onto {flat.parent1.short_string()}")
-                assert flat.parent1.git_commit
-                self.git.set_branch_head(flat.branch, flat.parent1.git_commit)
-                self.git.checkout(flat.branch)
-            parents.append(flat.parent1.git_commit)
-        if flat.parent2:
-            assert flat.parent2.git_commit
-            parents.append(flat.parent2.git_commit)
-
-        # create file if not existant
-        self.git.add_default_lfs_gitattributes(force=False)
-        self.git.add_default_gitignore()
-
-        to_download, to_delete = flat.commit.calc_delta(branch_state[flat.branch])
-        for file in to_delete:
-            self.git.remove(file)
-        for file, size, md5 in to_download:
-            self.commit_file(flat, file, size, md5)
-
         commit = self.git.commit(
-            flat.user.realname,
-            flat.user.email,
+            f"OBS User {flat.commit.userid}",
+            "null@suse.de",
             flat.commit.commit_time,
-            flat.commit.git_commit_message(),
+            # TODO: Normalize better the commit message
+            f"{flat.commit.comment}\n\n{flat.commit}",
+            allow_empty=True,
             parents=parents,
         )
         flat.commit.git_commit = commit
lib/hash.py (20 changed lines)
@@ -1,20 +0,0 @@
-import functools
-import hashlib
-
-
-def _hash(hash_alg, file_or_path):
-    h = hash_alg()
-
-    def __hash(f):
-        while chunk := f.read(1024 * 4):
-            h.update(chunk)
-
-    if hasattr(file_or_path, "read"):
-        __hash(file_or_path)
-    else:
-        with file_or_path.open("rb") as f:
-            __hash(f)
-    return h.hexdigest()
-
-
-md5 = functools.partial(_hash, hashlib.md5)
lib/importer.py (193 changed lines)
@@ -1,5 +1,4 @@
 import logging
-import pathlib
 import xml.etree.ElementTree as ET

 from lib.db import DB
@@ -9,65 +8,46 @@ from lib.obs_revision import OBSRevision
 from lib.user import User


-def refresh_package(importer, project, package):
-    importer.refresh_package(project, package)
-
-
-def import_request(importer, number):
-    importer.import_request(number)
-
-
-def import_rev(importer, rev):
-    importer.import_rev(rev)
-
-
 class Importer:
-    def __init__(self, api_url, project, packages):
-        # Import multiple Factory packages into the database
-        self.packages = packages
+    def __init__(self, api_url, project, package):
+        # Import a Factory package into the database
+        self.package = package
         self.project = project
-        self.scmsync_cache = dict()
-        self.packages_with_scmsync = set()

-        self.db = DB()
-        self.obs = OBS(api_url)
-        assert not self.has_scmsync(project)
+        self.obs = OBS()
+        assert project == "openSUSE:Factory"
+        self.obs.change_url(api_url)
         self.refreshed_packages = set()
-        self.gone_packages_set = None

-    def import_request(self, number):
-        self.obs.request(number).import_into_db(self.db)
-
-    def update_db_package(self, project, package):
+    def update_db_package(self, db, project, package):
         root = self.obs._history(project, package)
         if root is None:
             return
-        latest = DBRevision.max_rev(self.db, project, package)
+        latest = DBRevision.latest_revision(db, project, package)
         for r in root.findall("revision"):
             rev = OBSRevision(self.obs, project, package).parse(r)
-            if not latest or rev.rev > latest:
-                dbrev = DBRevision.import_obs_rev(self.db, rev)
+            if not latest or rev.rev > latest.rev:
+                dbrev = DBRevision.import_obs_rev(db, rev)
                 try:
                     root = rev.read_link()
                 except ET.ParseError:
-                    dbrev.set_broken()
+                    dbrev.set_broken(db)
                     continue
                 if root is not None:
                     tprj = root.get("project") or project
                     tpkg = root.get("package") or package
-                    dbrev.links_to(tprj, tpkg)
+                    dbrev.links_to(db, tprj, tpkg)

-    def find_linked_revs(self):
-        with self.db.cursor() as cur:
+    def find_linked_revs(self, db):
+        with db.cursor() as cur:
             cur.execute(
                 """SELECT * from revisions WHERE id in (SELECT l.revision_id FROM links l
                 LEFT JOIN linked_revs lrevs ON lrevs.revision_id=l.revision_id
                 WHERE lrevs.id IS NULL) and broken is FALSE;"""
             )
             for row in cur.fetchall():
-                rev = DBRevision(self.db, row)
-                linked_rev = rev.linked_rev()
+                rev = DBRevision(row)
+                linked_rev = rev.linked_rev(db)
                 if not linked_rev:
                     logging.debug(f"No link {rev}")
                     continue
@@ -77,8 +57,8 @@ class Importer:
                 (rev.dbid, linked_rev.dbid),
             )

-    def fetch_all_linked_packages(self, project, package):
-        with self.db.cursor() as cur:
+    def fetch_all_linked_packages(self, db, project, package):
+        with db.cursor() as cur:
             cur.execute(
                 """SELECT DISTINCT l.project, l.package from links l JOIN revisions r
                 on r.id=l.revision_id WHERE r.project=%s AND r.package=%s""",
@@ -87,26 +67,26 @@ class Importer:
             for row in cur.fetchall():
                 (lproject, lpackage) = row
                 # recurse
-                self.refresh_package(lproject, lpackage)
+                self.refresh_package(db, lproject, lpackage)

-    def find_fake_revisions(self):
-        with self.db.cursor() as cur:
+    def find_fake_revisions(self, db):
+        with db.cursor() as cur:
             cur.execute(
                 "SELECT * from revisions WHERE id in (SELECT linked_id from linked_revs WHERE considered=FALSE)"
             )
             for row in cur.fetchall():
-                self._find_fake_revision(DBRevision(self.db, row))
+                self._find_fake_revision(db, DBRevision(row))

-    def _find_fake_revision(self, rev):
-        prev = rev.previous_commit()
+    def _find_fake_revision(self, db, rev):
+        prev = rev.previous_commit(db)
         if not prev:
-            with self.db.cursor() as cur:
+            with db.cursor() as cur:
                 cur.execute(
                     "UPDATE linked_revs SET considered=TRUE where linked_id=%s",
                     (rev.dbid,),
                 )
             return
-        with self.db.cursor() as cur:
+        with db.cursor() as cur:
             cur.execute(
                 """SELECT * FROM revisions WHERE id IN
                 (SELECT revision_id from linked_revs WHERE linked_id=%s)
@@ -115,8 +95,8 @@ class Importer:
             )
             last_linked = None
             for linked in cur.fetchall():
-                linked = DBRevision(self.db, linked)
-                nextrev = linked.next_commit()
+                linked = DBRevision(linked)
+                nextrev = linked.next_commit(db)
                 if nextrev and nextrev.commit_time < rev.commit_time:
                     continue
                 last_linked = linked
@@ -127,7 +107,7 @@ class Importer:
         if not last_linked:
             return

-        with self.db.cursor() as cur:
+        with db.cursor() as cur:
             linked = last_linked
             cur.execute(
                 "SELECT 1 FROM fake_revs where revision_id=%s AND linked_id=%s",
@@ -140,10 +120,10 @@ class Importer:
                 )
                 return
             fake_rev = linked.rev + rev.rev / 1000.0
-            comment = f"Updating link to change in {rev.project}/{rev.package} revision {int(rev.rev)}"
+            comment = f"Updating link to change in {rev.project}/{rev.package} revision {rev.rev}"
             cur.execute(
                 """INSERT INTO revisions (project,package,rev,unexpanded_srcmd5,
-                commit_time, userid, comment, api_url) VALUES(%s,%s,%s,%s,%s,%s,%s,%s) RETURNING id""",
+                commit_time, userid, comment) VALUES(%s,%s,%s,%s,%s,%s,%s) RETURNING id""",
                 (
                     linked.project,
                     linked.package,
@@ -152,7 +132,6 @@ class Importer:
                     rev.commit_time,
                     "buildservice-autocommit",
                     comment,
-                    linked.api_url,
                 ),
             )
             new_id = cur.fetchone()[0]
@@ -165,17 +144,19 @@ class Importer:
                 (rev.dbid, linked.dbid),
             )

-    def revisions_without_files(self, package):
-        logging.debug(f"revisions_without_files({package})")
-        with self.db.cursor() as cur:
+    def revisions_without_files(self, db):
+        with db.cursor() as cur:
             cur.execute(
-                "SELECT * FROM revisions WHERE package=%s AND broken=FALSE AND expanded_srcmd5 IS NULL",
-                (package,),
+                "SELECT * FROM revisions WHERE broken=FALSE AND expanded_srcmd5 IS NULL"
             )
-            return [DBRevision(self.db, row) for row in cur.fetchall()]
+            return [DBRevision(row) for row in cur.fetchall()]

-    def import_rev(self, rev):
-        with self.db.cursor() as cur:
+    def fill_file_lists(self, db):
+        self.find_linked_revs(db)
+
+        self.find_fake_revisions(db)
+        for rev in self.revisions_without_files(db):
+            with db.cursor() as cur:
                 cur.execute(
                     """SELECT unexpanded_srcmd5 from revisions WHERE
                     id=(SELECT linked_id FROM linked_revs WHERE revision_id=%s)""",
|
||||||
@ -184,99 +165,49 @@ class Importer:
|
|||||||
linked_rev = cur.fetchone()
|
linked_rev = cur.fetchone()
|
||||||
if linked_rev:
|
if linked_rev:
|
||||||
linked_rev = linked_rev[0]
|
linked_rev = linked_rev[0]
|
||||||
obs_dir_list = self.obs.list(
|
list = self.obs.list(
|
||||||
rev.project, rev.package, rev.unexpanded_srcmd5, linked_rev
|
rev.project, rev.package, rev.unexpanded_srcmd5, linked_rev
|
||||||
)
|
)
|
||||||
if obs_dir_list:
|
if list:
|
||||||
rev.import_dir_list(obs_dir_list)
|
rev.import_dir_list(db, list)
|
||||||
md5 = rev.calculate_files_hash()
|
md5 = rev.calculate_files_hash(db)
|
||||||
with self.db.cursor() as cur:
|
with db.cursor() as cur:
|
||||||
cur.execute(
|
cur.execute(
|
||||||
"UPDATE revisions SET files_hash=%s WHERE id=%s",
|
"UPDATE revisions SET files_hash=%s WHERE id=%s",
|
||||||
(md5, rev.dbid),
|
(md5, rev.dbid),
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
rev.set_broken()
|
rev.set_broken(db)
|
||||||
|
|
||||||
def fill_file_lists(self):
|
def refresh_package(self, db, project, package):
|
||||||
self.find_linked_revs()
|
|
||||||
|
|
||||||
self.find_fake_revisions()
|
|
||||||
for package in self.packages:
|
|
||||||
for rev in self.revisions_without_files(package):
|
|
||||||
print(f"rev {rev} is without files")
|
|
||||||
self.import_rev(rev)
|
|
||||||
|
|
||||||
def refresh_package(self, project, package):
|
|
||||||
key = f"{project}/{package}"
|
key = f"{project}/{package}"
|
||||||
if key in self.refreshed_packages:
|
if key in self.refreshed_packages:
|
||||||
# refreshing once is good enough
|
# refreshing once is good enough
|
||||||
return
|
return
|
||||||
if self.package_gone(key):
|
|
||||||
return
|
|
||||||
logging.debug(f"Refresh {project}/{package}")
|
|
||||||
self.refreshed_packages.add(key)
|
self.refreshed_packages.add(key)
|
||||||
if self.has_scmsync(project) or self.has_scmsync(key):
|
self.update_db_package(db, project, package)
|
||||||
self.packages_with_scmsync.add(package)
|
self.fetch_all_linked_packages(db, project, package)
|
||||||
logging.debug(f"{project}/{package} already in Git - skipping")
|
|
||||||
return
|
|
||||||
self.update_db_package(project, package)
|
|
||||||
self.fetch_all_linked_packages(project, package)
|
|
||||||
|
|
||||||
def import_into_db(self):
|
def import_into_db(self):
|
||||||
for package in self.packages:
|
db = DB()
|
||||||
refresh_package(self, self.project, package)
|
|
||||||
|
|
||||||
self.db.conn.commit()
|
self.refresh_package(db, self.project, self.package)
|
||||||
|
for number in DBRevision.requests_to_fetch(db):
|
||||||
for number in DBRevision.requests_to_fetch(self.db):
|
self.obs.request(number).import_into_db(db)
|
||||||
self.import_request(number)
|
with db.cursor() as cur:
|
||||||
|
|
||||||
self.db.conn.commit()
|
|
||||||
|
|
||||||
with self.db.cursor() as cur:
|
|
||||||
cur.execute(
|
cur.execute(
|
||||||
"""SELECT DISTINCT source_project,source_package FROM requests
|
"""SELECT DISTINCT source_project,source_package FROM requests
|
||||||
WHERE id IN (SELECT request_id FROM revisions WHERE project=%s and package = ANY(%s));""",
|
WHERE id IN (SELECT request_id FROM revisions WHERE project=%s and package=%s);""",
|
||||||
(self.project, self.packages),
|
(self.project, self.package),
|
||||||
)
|
)
|
||||||
for project, package in cur.fetchall():
|
for project, package in cur.fetchall():
|
||||||
self.refresh_package(project, package)
|
self.refresh_package(db, project, package)
|
||||||
|
|
||||||
self.db.conn.commit()
|
missing_users = User.missing_users(db)
|
||||||
|
|
||||||
missing_users = User.missing_users(self.db)
|
|
||||||
for userid in missing_users:
|
for userid in missing_users:
|
||||||
missing_user = self.obs.user(userid)
|
missing_user = self.obs.user(userid)
|
||||||
if missing_user:
|
if missing_user:
|
||||||
missing_user.import_into_db(self.db)
|
missing_user.import_into_db(db)
|
||||||
self.db.conn.commit()
|
|
||||||
|
|
||||||
self.fill_file_lists()
|
|
||||||
self.db.conn.commit()
|
|
||||||
|
|
||||||
def package_gone(self, key):
|
|
||||||
if not self.gone_packages_set:
|
|
||||||
self.gone_packages_set = set()
|
|
||||||
with open(pathlib.Path(__file__).parent.parent / "gone-packages.txt") as f:
|
|
||||||
for line in f.readlines():
|
|
||||||
self.gone_packages_set.add(line.strip())
|
|
||||||
return key in self.gone_packages_set
|
|
||||||
|
|
||||||
def has_scmsync(self, key):
|
|
||||||
if key in self.scmsync_cache:
|
|
||||||
return self.scmsync_cache[key]
|
|
||||||
|
|
||||||
root = self.obs._meta(key)
|
|
||||||
scmsync = None
|
|
||||||
scmsync_exists = False
|
|
||||||
if root and root.find('scmsync') is not None:
|
|
||||||
scmsync = root.find('scmsync').text
|
|
||||||
if scmsync:
|
|
||||||
scmsync_exists = scmsync.startswith('https://src.opensuse.org/pool/')
|
|
||||||
self.scmsync_cache[key] = scmsync_exists
|
|
||||||
return scmsync_exists
|
|
||||||
|
|
||||||
def package_with_scmsync(self, package):
|
|
||||||
return package in self.packages_with_scmsync
|
|
||||||
|
|
||||||
|
self.fill_file_lists(db)
|
||||||
|
db.conn.commit()
|
||||||
|
194
lib/lfs_oid.py
194
lib/lfs_oid.py
@ -1,194 +0,0 @@
|
|||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import logging
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
|
|
||||||
import requests
|
|
||||||
|
|
||||||
from lib.binary import is_text_mimetype
|
|
||||||
from lib.db import DB
|
|
||||||
|
|
||||||
|
|
||||||
# no need for this class yet, so just leave the migration code here
|
|
||||||
class LFSOid:
|
|
||||||
def __init__(self, db: DB) -> None:
|
|
||||||
self.db = db
|
|
||||||
self.dbid = None
|
|
||||||
self.project = None
|
|
||||||
self.package = None
|
|
||||||
self.filename = None
|
|
||||||
self.revision = None
|
|
||||||
self.sha = None
|
|
||||||
self.size = None
|
|
||||||
self.mimetype = None
|
|
||||||
self.file_md5 = None
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def check_all(db, package):
|
|
||||||
with db.cursor() as cur:
|
|
||||||
cur.execute(
|
|
||||||
"SELECT lfs_oid_id FROM lfs_oid_in_package WHERE package=%s ORDER BY lfs_oid_id DESC limit 10 ",
|
|
||||||
(package,),
|
|
||||||
)
|
|
||||||
for row in cur.fetchall():
|
|
||||||
oid = LFSOid(db).set_from_dbid(row[0])
|
|
||||||
if not oid.check():
|
|
||||||
oid.register()
|
|
||||||
|
|
||||||
def add(
|
|
||||||
self,
|
|
||||||
project: str,
|
|
||||||
package: str,
|
|
||||||
filename: str,
|
|
||||||
revision: str,
|
|
||||||
sha256: str,
|
|
||||||
size: int,
|
|
||||||
mimetype: str,
|
|
||||||
file_md5: str,
|
|
||||||
) -> None:
|
|
||||||
with self.db.cursor() as cur:
|
|
||||||
# we UPDATE here so the return functions. conflicts are likely as we look for filename/md5 but conflict on sha256
|
|
||||||
cur.execute(
|
|
||||||
"""INSERT INTO lfs_oids (project,package,filename,rev,sha256,size,mimetype,file_md5)
|
|
||||||
VALUES (%s,%s,%s,%s,%s,%s,%s,%s)
|
|
||||||
ON CONFLICT (sha256,size) DO UPDATE SET mimetype=EXCLUDED.mimetype
|
|
||||||
RETURNING id""",
|
|
||||||
(
|
|
||||||
project,
|
|
||||||
package,
|
|
||||||
filename,
|
|
||||||
revision,
|
|
||||||
sha256,
|
|
||||||
size,
|
|
||||||
mimetype,
|
|
||||||
file_md5,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
row = cur.fetchone()
|
|
||||||
lfs_oid_id = row[0]
|
|
||||||
cur.execute(
|
|
||||||
"""INSERT INTO lfs_oid_in_package (package,filename,lfs_oid_id)
|
|
||||||
VALUES (%s,%s,%s)""",
|
|
||||||
(package, filename, lfs_oid_id),
|
|
||||||
)
|
|
||||||
if is_text_mimetype(mimetype):
|
|
||||||
cur.execute(
|
|
||||||
"INSERT INTO text_files (package,filename) VALUES (%s,%s)",
|
|
||||||
(package, filename),
|
|
||||||
)
|
|
||||||
self.db.conn.commit()
|
|
||||||
self.set_from_dbid(lfs_oid_id)
|
|
||||||
if not self.check():
|
|
||||||
self.register()
|
|
||||||
|
|
||||||
def check(self):
|
|
||||||
url = f"http://localhost:9999/check/{self.sha256}/{self.size}"
|
|
||||||
response = requests.get(
|
|
||||||
url,
|
|
||||||
timeout=10,
|
|
||||||
)
|
|
||||||
return response.status_code == 200
|
|
||||||
|
|
||||||
def set_from_dbid(self, dbid: int) -> LFSOid:
|
|
||||||
with self.db.cursor() as cur:
|
|
||||||
cur.execute("SELECT * from lfs_oids where id=%s", (dbid,))
|
|
||||||
row = cur.fetchone()
|
|
||||||
self.set_from_row(row)
|
|
||||||
assert self.dbid == dbid
|
|
||||||
return self
|
|
||||||
|
|
||||||
def set_from_row(self, row: list) -> LFSOid:
|
|
||||||
(
|
|
||||||
self.dbid,
|
|
||||||
self.project,
|
|
||||||
self.package,
|
|
||||||
self.filename,
|
|
||||||
self.revision,
|
|
||||||
self.sha256,
|
|
||||||
self.size,
|
|
||||||
self.mimetype,
|
|
||||||
self.file_md5,
|
|
||||||
) = row
|
|
||||||
return self
|
|
||||||
|
|
||||||
def register(self):
|
|
||||||
if not os.getenv("GITEA_REGISTER_SECRET"):
|
|
||||||
logging.info("Not registering LFS due to missing secret")
|
|
||||||
return
|
|
||||||
|
|
||||||
data = {
|
|
||||||
"secret": os.getenv("GITEA_REGISTER_SECRET"),
|
|
||||||
"project": self.project,
|
|
||||||
"package": self.package,
|
|
||||||
"filename": self.filename,
|
|
||||||
"rev": self.revision,
|
|
||||||
"sha256": self.sha256,
|
|
||||||
"size": self.size,
|
|
||||||
}
|
|
||||||
|
|
||||||
url = "http://localhost:9999/register"
|
|
||||||
response = requests.post(
|
|
||||||
url,
|
|
||||||
json=data,
|
|
||||||
timeout=10,
|
|
||||||
)
|
|
||||||
response.raise_for_status()
|
|
||||||
logging.info(f"Register LFS returned {response.status_code}")
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
"""
|
|
||||||
Import the old data - it only makes sense on a DB with previously scanned revisions
|
|
||||||
curl -s https://stephan.kulow.org/git_lfs.csv.xz | xz -cd | PYTHONPATH=$PWD /usr/bin/python3 lib/lfs_oid.py
|
|
||||||
"""
|
|
||||||
db = DB()
|
|
||||||
logging.basicConfig(level=logging.DEBUG)
|
|
||||||
with db.cursor() as cur:
|
|
||||||
while True:
|
|
||||||
line = sys.stdin.readline()
|
|
||||||
if not line:
|
|
||||||
break
|
|
||||||
(
|
|
||||||
project,
|
|
||||||
package,
|
|
||||||
filename,
|
|
||||||
rev,
|
|
||||||
sha256,
|
|
||||||
size,
|
|
||||||
mimetype,
|
|
||||||
md5,
|
|
||||||
) = line.strip().split("\t")
|
|
||||||
cur.execute(
|
|
||||||
"""INSERT INTO lfs_oids (project,package,filename,rev,sha256,size,mimetype,file_md5)
|
|
||||||
VALUES (%s,%s,%s,%s,%s,%s,%s,%s) ON CONFLICT DO NOTHING""",
|
|
||||||
(project, package, filename, rev, sha256, size, mimetype, md5),
|
|
||||||
)
|
|
||||||
|
|
||||||
cur.execute(
|
|
||||||
"""
|
|
||||||
CREATE TEMPORARY TABLE lfs_oid_in_revision (
|
|
||||||
revision_id INTEGER,
|
|
||||||
lfs_oid_id INTEGER NOT NULL,
|
|
||||||
name VARCHAR(255) NOT NULL
|
|
||||||
)
|
|
||||||
"""
|
|
||||||
)
|
|
||||||
cur.execute(
|
|
||||||
"""INSERT INTO lfs_oid_in_revision (revision_id, lfs_oid_id, name)
|
|
||||||
SELECT revision_id,lfs_oids.id,files.name FROM lfs_oids JOIN files ON files.md5=lfs_oids.file_md5"""
|
|
||||||
)
|
|
||||||
cur.execute(
|
|
||||||
"""INSERT INTO text_files (package,filename)
|
|
||||||
SELECT DISTINCT r.package, lfs_oid_in_revision.name FROM lfs_oids
|
|
||||||
JOIN lfs_oid_in_revision on lfs_oid_in_revision.lfs_oid_id=lfs_oids.id
|
|
||||||
JOIN revisions r ON r.id=lfs_oid_in_revision.revision_id
|
|
||||||
WHERE lfs_oids.mimetype like 'text/%' ON CONFLICT DO NOTHING"""
|
|
||||||
)
|
|
||||||
cur.execute(
|
|
||||||
"""INSERT INTO lfs_oid_in_package (lfs_oid_id, package, filename)
|
|
||||||
SELECT DISTINCT lfs_oids.id,r.package, lfs_oid_in_revision.name FROM lfs_oids
|
|
||||||
JOIN lfs_oid_in_revision on lfs_oid_in_revision.lfs_oid_id=lfs_oids.id
|
|
||||||
JOIN revisions r ON r.id=lfs_oid_in_revision.revision_id"""
|
|
||||||
)
|
|
||||||
db.conn.commit()
|
|
42
lib/obs.py
42
lib/obs.py
@ -1,7 +1,5 @@
|
|||||||
import errno
|
import errno
|
||||||
import logging
|
import logging
|
||||||
import os
|
|
||||||
import shutil
|
|
||||||
import time
|
import time
|
||||||
import urllib.parse
|
import urllib.parse
|
||||||
import xml.etree.ElementTree as ET
|
import xml.etree.ElementTree as ET
|
||||||
@ -9,7 +7,6 @@ from urllib.error import HTTPError
|
|||||||
|
|
||||||
import osc.core
|
import osc.core
|
||||||
|
|
||||||
from lib.hash import md5
|
|
||||||
from lib.request import Request
|
from lib.request import Request
|
||||||
from lib.user import User
|
from lib.user import User
|
||||||
|
|
||||||
@ -59,12 +56,11 @@ osc.core.http_GET = retry(osc.core.http_GET)
|
|||||||
|
|
||||||
|
|
||||||
class OBS:
|
class OBS:
|
||||||
def __init__(self, url):
|
def __init__(self, url=None):
|
||||||
self.url = None
|
if url:
|
||||||
self.change_url(url)
|
self.change_url(url)
|
||||||
|
|
||||||
def change_url(self, url):
|
def change_url(self, url):
|
||||||
if url != self.url:
|
|
||||||
self.url = url
|
self.url = url
|
||||||
osc.conf.get_config(override_apiurl=url)
|
osc.conf.get_config(override_apiurl=url)
|
||||||
|
|
||||||
@ -73,11 +69,11 @@ class OBS:
|
|||||||
logging.debug(f"GET {url}")
|
logging.debug(f"GET {url}")
|
||||||
return ET.parse(osc.core.http_GET(url)).getroot()
|
return ET.parse(osc.core.http_GET(url)).getroot()
|
||||||
|
|
||||||
def _meta(self, key, **params):
|
def _meta(self, project, package, **params):
|
||||||
try:
|
try:
|
||||||
root = self._xml(f"source/{key}/_meta", **params)
|
root = self._xml(f"source/{project}/{package}/_meta", **params)
|
||||||
except HTTPError:
|
except HTTPError:
|
||||||
logging.error(f"Project/Package [{key} {params}] has no meta")
|
logging.error(f"Package [{project}/{package} {params}] has no meta")
|
||||||
return None
|
return None
|
||||||
return root
|
return root
|
||||||
|
|
||||||
@ -118,13 +114,13 @@ class OBS:
|
|||||||
return root
|
return root
|
||||||
|
|
||||||
def exists(self, project, package):
|
def exists(self, project, package):
|
||||||
root = self._meta(f"{project}/{package}")
|
root = self._meta(project, package)
|
||||||
if root is None:
|
if root is None:
|
||||||
return False
|
return False
|
||||||
return root.get("project") == project
|
return root.get("project") == project
|
||||||
|
|
||||||
def devel_project(self, project, package):
|
def devel_project(self, project, package):
|
||||||
root = self._meta(f"{project}/{package}")
|
root = self._meta(project, package)
|
||||||
devel = root.find("devel")
|
devel = root.find("devel")
|
||||||
if devel is None:
|
if devel is None:
|
||||||
return None
|
return None
|
||||||
@ -150,7 +146,7 @@ class OBS:
|
|||||||
def _download(self, project, package, name, revision):
|
def _download(self, project, package, name, revision):
|
||||||
url = osc.core.makeurl(
|
url = osc.core.makeurl(
|
||||||
self.url,
|
self.url,
|
||||||
["source", project, package, name],
|
["source", project, package, urllib.parse.quote(name)],
|
||||||
{"rev": revision, "expand": 1},
|
{"rev": revision, "expand": 1},
|
||||||
)
|
)
|
||||||
return osc.core.http_GET(url)
|
return osc.core.http_GET(url)
|
||||||
@ -162,25 +158,11 @@ class OBS:
|
|||||||
name: str,
|
name: str,
|
||||||
revision: str,
|
revision: str,
|
||||||
dirpath: str,
|
dirpath: str,
|
||||||
cachedir: str,
|
|
||||||
file_md5: str,
|
file_md5: str,
|
||||||
) -> None:
|
) -> None:
|
||||||
cached_file = self._path_from_md5(name, cachedir, file_md5)
|
|
||||||
if not self.in_cache(name, cachedir, file_md5):
|
|
||||||
with (dirpath / name).open("wb") as f:
|
with (dirpath / name).open("wb") as f:
|
||||||
logging.debug(f"Download {project}/{package}/{name}")
|
|
||||||
f.write(self._download(project, package, name, revision).read())
|
f.write(self._download(project, package, name, revision).read())
|
||||||
|
|
||||||
# Validate the MD5 of the downloaded file
|
|
||||||
if md5(dirpath / name) != file_md5:
|
|
||||||
raise Exception(f"Download error in {name}")
|
|
||||||
|
|
||||||
shutil.copy(dirpath / name, cached_file.with_suffix(".new"))
|
|
||||||
os.rename(cached_file.with_suffix(".new"), cached_file)
|
|
||||||
else:
|
|
||||||
shutil.copy(cached_file, dirpath / name)
|
|
||||||
logging.debug(f"Use cached {project}/{package}/{name}")
|
|
||||||
|
|
||||||
def list(self, project, package, srcmd5, linkrev):
|
def list(self, project, package, srcmd5, linkrev):
|
||||||
params = {"rev": srcmd5, "expand": "1"}
|
params = {"rev": srcmd5, "expand": "1"}
|
||||||
if linkrev:
|
if linkrev:
|
||||||
@ -197,11 +179,3 @@ class OBS:
|
|||||||
raise e
|
raise e
|
||||||
|
|
||||||
return root
|
return root
|
||||||
|
|
||||||
def _path_from_md5(self, name, cachedir, md5):
|
|
||||||
filepath = cachedir / md5[:3]
|
|
||||||
filepath.mkdir(parents=True, exist_ok=True)
|
|
||||||
return filepath / md5[3:]
|
|
||||||
|
|
||||||
def in_cache(self, name, cachedir, md5):
|
|
||||||
return self._path_from_md5(name, cachedir, md5).exists()
|
|
||||||
|
@ -1,89 +1,106 @@
|
|||||||
|
import functools
|
||||||
import hashlib
|
import hashlib
|
||||||
import logging
|
import logging
|
||||||
|
import urllib
|
||||||
|
|
||||||
try:
|
import requests
|
||||||
import magic
|
|
||||||
except:
|
|
||||||
print("Install python3-python-magic, not python3-magic")
|
|
||||||
raise
|
|
||||||
|
|
||||||
from lib.db import DB
|
|
||||||
from lib.lfs_oid import LFSOid
|
def _hash(hash_alg, file_or_path):
|
||||||
from lib.obs import OBS
|
h = hash_alg()
|
||||||
|
|
||||||
|
def __hash(f):
|
||||||
|
while chunk := f.read(1024 * 4):
|
||||||
|
h.update(chunk)
|
||||||
|
|
||||||
|
if hasattr(file_or_path, "read"):
|
||||||
|
__hash(file_or_path)
|
||||||
|
else:
|
||||||
|
with file_or_path.open("rb") as f:
|
||||||
|
__hash(f)
|
||||||
|
return h.hexdigest()
|
||||||
|
|
||||||
|
|
||||||
|
md5 = functools.partial(_hash, hashlib.md5)
|
||||||
|
sha256 = functools.partial(_hash, hashlib.sha256)
|
||||||
|
|
||||||
|
|
||||||
class ProxySHA256:
|
class ProxySHA256:
|
||||||
def __init__(self, obs: OBS, db: DB):
|
def __init__(self, obs, url=None, enabled=True):
|
||||||
self.obs = obs
|
self.obs = obs
|
||||||
self.db = db
|
self.url = url if url else "http://source.dyn.cloud.suse.de"
|
||||||
|
self.enabled = enabled
|
||||||
self.hashes = None
|
self.hashes = None
|
||||||
self.texts = None
|
self.texts = set()
|
||||||
self.mime = None
|
|
||||||
|
def load_package(self, package):
|
||||||
|
# _project is unreachable for the proxy - due to being a fake package
|
||||||
|
if package == "_project":
|
||||||
|
self.enabled = False
|
||||||
|
self.texts = set(["_config", "_service"])
|
||||||
|
self.hashes = dict()
|
||||||
|
return
|
||||||
|
logging.info("Retrieve all previously defined SHA256")
|
||||||
|
response = requests.get(f"http://source.dyn.cloud.suse.de/package/{package}")
|
||||||
|
if response.status_code == 200:
|
||||||
|
json = response.json()
|
||||||
|
self.hashes = json["shas"]
|
||||||
|
self.texts = set(json["texts"])
|
||||||
|
|
||||||
def get(self, package, name, file_md5):
|
def get(self, package, name, file_md5):
|
||||||
if self.hashes is None:
|
|
||||||
self.load_hashes(package)
|
|
||||||
key = f"{file_md5}-{name}"
|
key = f"{file_md5}-{name}"
|
||||||
ret = self.hashes.get(key)
|
if self.hashes is None:
|
||||||
return ret
|
if self.enabled:
|
||||||
|
self.load_package(package)
|
||||||
|
else:
|
||||||
|
self.hashes = {}
|
||||||
|
return self.hashes.get(key, None)
|
||||||
|
|
||||||
def load_hashes(self, package):
|
def _proxy_put(self, project, package, name, revision, file_md5, size):
|
||||||
with self.db.cursor() as cur:
|
quoted_name = urllib.parse.quote(name)
|
||||||
cur.execute(
|
url = f"{self.obs.url}/public/source/{project}/{package}/{quoted_name}?rev={revision}"
|
||||||
"""SELECT lfs_oids.file_md5,lop.filename,lfs_oids.sha256,lfs_oids.size
|
response = requests.put(
|
||||||
FROM lfs_oid_in_package lop
|
self.url,
|
||||||
JOIN lfs_oids ON lfs_oids.id=lop.lfs_oid_id
|
data={
|
||||||
WHERE lop.package=%s""",
|
"hash": file_md5,
|
||||||
(package,),
|
"filename": name,
|
||||||
|
"url": url,
|
||||||
|
"package": package,
|
||||||
|
},
|
||||||
)
|
)
|
||||||
self.hashes = {
|
if response.status_code != 200:
|
||||||
f"{row[0]}-{row[1]}": (row[2], row[3]) for row in cur.fetchall()
|
raise Exception(f"Redirector error on {self.url} for {url}")
|
||||||
|
|
||||||
|
key = (file_md5, name)
|
||||||
|
self.hashes[key] = {
|
||||||
|
"sha256": response.content.decode("utf-8"),
|
||||||
|
"fsize": size,
|
||||||
}
|
}
|
||||||
|
return self.hashes[key]
|
||||||
|
|
||||||
|
def _obs_put(self, project, package, name, revision, file_md5, size):
|
||||||
|
key = (file_md5, name)
|
||||||
|
self.hashes[key] = {
|
||||||
|
"sha256": sha256(self.obs._download(project, package, name, revision)),
|
||||||
|
"fsize": size,
|
||||||
|
}
|
||||||
|
return self.hashes[key]
|
||||||
|
|
||||||
def put(self, project, package, name, revision, file_md5, size):
|
def put(self, project, package, name, revision, file_md5, size):
|
||||||
if not self.mime:
|
if not self.enabled:
|
||||||
self.mime = magic.Magic(mime=True)
|
return self._obs_put(project, package, name, revision, file_md5, size)
|
||||||
|
return self._proxy_put(project, package, name, revision, file_md5, size)
|
||||||
|
|
||||||
mimetype = None
|
def is_text(self, filename):
|
||||||
logging.debug(f"Add LFS for {project}/{package}/{name}")
|
|
||||||
fin = self.obs._download(project, package, name, revision)
|
|
||||||
sha = hashlib.sha256()
|
|
||||||
while True:
|
|
||||||
buffer = fin.read(10000)
|
|
||||||
if not buffer:
|
|
||||||
break
|
|
||||||
sha.update(buffer)
|
|
||||||
# only guess from the first 10K
|
|
||||||
if not mimetype:
|
|
||||||
mimetype = self.mime.from_buffer(buffer)
|
|
||||||
fin.close()
|
|
||||||
LFSOid(self.db).add(
|
|
||||||
project, package, name, revision, sha.hexdigest(), size, mimetype, file_md5
|
|
||||||
)
|
|
||||||
|
|
||||||
# reset
|
|
||||||
self.hashes = None
|
|
||||||
self.texts = None
|
|
||||||
return self.get(package, name, file_md5)
|
|
||||||
|
|
||||||
def is_text(self, package, filename):
|
|
||||||
if self.texts is None:
|
|
||||||
self.load_texts(package)
|
|
||||||
return filename in self.texts
|
return filename in self.texts
|
||||||
|
|
||||||
def load_texts(self, package):
|
|
||||||
self.texts = set()
|
|
||||||
with self.db.cursor() as cur:
|
|
||||||
cur.execute("SELECT filename from text_files where package=%s", (package,))
|
|
||||||
for row in cur.fetchall():
|
|
||||||
self.texts.add(row[0])
|
|
||||||
|
|
||||||
def get_or_put(self, project, package, name, revision, file_md5, size):
|
def get_or_put(self, project, package, name, revision, file_md5, size):
|
||||||
result = self.get(package, name, file_md5)
|
result = self.get(package, name, file_md5)
|
||||||
if not result:
|
if not result:
|
||||||
result = self.put(project, package, name, revision, file_md5, size)
|
result = self.put(project, package, name, revision, file_md5, size)
|
||||||
|
|
||||||
sha256, db_size = result
|
# Sanity check
|
||||||
assert db_size == size
|
if result["fsize"] != size:
|
||||||
|
raise Exception(f"Redirector has different size for {name}")
|
||||||
|
|
||||||
return sha256
|
return result
|
||||||
|
@ -16,11 +16,11 @@ class TestExporter:
|
|||||||
db = DB()
|
db = DB()
|
||||||
with db.cursor() as cur:
|
with db.cursor() as cur:
|
||||||
cur.execute(
|
cur.execute(
|
||||||
"SELECT * from revisions where package=%s ORDER BY commit_time",
|
"SELECT * from revisions where package=%s ORDER BY project,rev",
|
||||||
(self.package,),
|
(self.package,),
|
||||||
)
|
)
|
||||||
data = {"revisions": []}
|
data = {"revisions": []}
|
||||||
for row in cur.fetchall():
|
for row in cur.fetchall():
|
||||||
data["revisions"].append(DBRevision(db, row).as_dict())
|
data["revisions"].append(DBRevision(row).as_dict(db))
|
||||||
|
|
||||||
yaml.dump(data, sys.stdout, default_flow_style=False)
|
yaml.dump(data, sys.stdout, default_flow_style=False)
|
||||||
|
@ -1,3 +1,4 @@
|
|||||||
|
from typing import Dict
|
||||||
from xmlrpc.client import Boolean
|
from xmlrpc.client import Boolean
|
||||||
|
|
||||||
from lib.db_revision import DBRevision
|
from lib.db_revision import DBRevision
|
||||||
@ -103,24 +104,14 @@ class TreeBuilder:
|
|||||||
"""For a given revision in the target, find the node in the source chain
|
"""For a given revision in the target, find the node in the source chain
|
||||||
that matches the files"""
|
that matches the files"""
|
||||||
node = source_chain
|
node = source_chain
|
||||||
candidates = []
|
|
||||||
while node:
|
while node:
|
||||||
# exclude reverts happening after the merge
|
# exclude reverts happening after the merge
|
||||||
if (
|
if (
|
||||||
node.revision.commit_time <= revision.commit_time
|
node.revision.commit_time <= revision.commit_time
|
||||||
and node.revision.files_hash == revision.files_hash
|
and node.revision.files_hash == revision.files_hash
|
||||||
):
|
):
|
||||||
candidates.append(node)
|
return node
|
||||||
if node.merged_into:
|
|
||||||
# we can't have candidates that are crossing previous merges
|
|
||||||
# see https://src.opensuse.org/importers/git-importer/issues/14
|
|
||||||
candidates = []
|
|
||||||
node = node.parent
|
node = node.parent
|
||||||
if candidates:
|
|
||||||
# the first candidate is the youngest one that matches the check. That's
|
|
||||||
# good enough. See FastCGI test case for rev 36 and 38: 37 reverted 36 and
|
|
||||||
# then 38 reverting the revert before it was submitted.
|
|
||||||
return candidates[0]
|
|
||||||
|
|
||||||
def add_merge_points(self, factory_revisions):
|
def add_merge_points(self, factory_revisions):
|
||||||
"""For all target revisions that accepted a request, look up the merge
|
"""For all target revisions that accepted a request, look up the merge
|
||||||
@ -137,7 +128,7 @@ class TreeBuilder:
|
|||||||
self.requests.add(node.revision.request_id)
|
self.requests.add(node.revision.request_id)
|
||||||
|
|
||||||
class FindMergeWalker(AbstractWalker):
|
class FindMergeWalker(AbstractWalker):
|
||||||
def __init__(self, builder: TreeBuilder, requests: dict) -> None:
|
def __init__(self, builder: TreeBuilder, requests: Dict) -> None:
|
||||||
super().__init__()
|
super().__init__()
|
||||||
self.source_revisions = dict()
|
self.source_revisions = dict()
|
||||||
self.builder = builder
|
self.builder = builder
|
||||||
|
20
lib/user.py
20
lib/user.py
@ -1,7 +1,3 @@
|
|||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from lib.db import DB
|
|
||||||
|
|
||||||
FAKE_ACCOUNTS = (
|
FAKE_ACCOUNTS = (
|
||||||
"unknown",
|
"unknown",
|
||||||
"buildservice-autocommit",
|
"buildservice-autocommit",
|
||||||
@ -19,22 +15,6 @@ FAKE_ACCOUNTS = (
|
|||||||
|
|
||||||
|
|
||||||
class User:
|
class User:
|
||||||
@staticmethod
|
|
||||||
def find(db: DB, userid: str) -> User:
|
|
||||||
row = User.lookup(db, userid)
|
|
||||||
self = User()
|
|
||||||
self.userid = userid
|
|
||||||
if row:
|
|
||||||
(_, _, self.email, self.realname) = row
|
|
||||||
else:
|
|
||||||
self.email = ""
|
|
||||||
self.realname = ""
|
|
||||||
if not self.email:
|
|
||||||
self.email = "null@suse.de"
|
|
||||||
if not self.realname:
|
|
||||||
self.realname = f"OBS User {userid}"
|
|
||||||
return self
|
|
||||||
|
|
||||||
def parse(self, xml, userid):
|
def parse(self, xml, userid):
|
||||||
self.userid = userid
|
self.userid = userid
|
||||||
self.realname = xml.find("realname").text
|
self.realname = xml.find("realname").text
|
||||||
|
@ -1,61 +0,0 @@
|
|||||||
#!/usr/bin/python3
|
|
||||||
import json
|
|
||||||
from pathlib import Path
|
|
||||||
import pika
|
|
||||||
import random
|
|
||||||
import time
|
|
||||||
|
|
||||||
MY_TASKS_DIR = Path(__file__).parent / "tasks"
|
|
||||||
|
|
||||||
|
|
||||||
def listen_events():
|
|
||||||
connection = pika.BlockingConnection(
|
|
||||||
pika.URLParameters("amqps://opensuse:opensuse@rabbit.opensuse.org")
|
|
||||||
)
|
|
||||||
channel = connection.channel()
|
|
||||||
|
|
||||||
channel.exchange_declare(
|
|
||||||
exchange="pubsub", exchange_type="topic", passive=True, durable=False
|
|
||||||
)
|
|
||||||
|
|
||||||
result = channel.queue_declare("", exclusive=True)
|
|
||||||
queue_name = result.method.queue
|
|
||||||
|
|
||||||
channel.queue_bind(
|
|
||||||
exchange="pubsub", queue=queue_name, routing_key="opensuse.obs.package.commit"
|
|
||||||
)
|
|
||||||
|
|
||||||
print(" [*] Waiting for logs. To exit press CTRL+C")
|
|
||||||
|
|
||||||
def callback(ch, method, properties, body):
|
|
||||||
if method.routing_key not in ("opensuse.obs.package.commit",):
|
|
||||||
return
|
|
||||||
body = json.loads(body)
|
|
||||||
if (
|
|
||||||
"project" in body
|
|
||||||
and "package" in body
|
|
||||||
and body["project"] == "openSUSE:Factory"
|
|
||||||
):
|
|
||||||
# Strip multibuild flavors
|
|
||||||
package = body["package"].partition(':')[0]
|
|
||||||
if "/" in package:
|
|
||||||
return
|
|
||||||
|
|
||||||
(MY_TASKS_DIR / package).touch()
|
|
||||||
print(" [x] %r:%r" % (method.routing_key, body["package"]))
|
|
||||||
|
|
||||||
channel.basic_consume(queue_name, callback, auto_ack=True)
|
|
||||||
|
|
||||||
channel.start_consuming()
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
|
||||||
while True:
|
|
||||||
try:
|
|
||||||
listen_events()
|
|
||||||
except (pika.exceptions.ConnectionClosed, pika.exceptions.AMQPHeartbeatTimeout):
|
|
||||||
time.sleep(random.randint(10, 100))
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
main()
|
|
1
tasks/.gitignore
vendored
1
tasks/.gitignore
vendored
@ -1 +0,0 @@
|
|||||||
*
|
|
@ -6,14 +6,11 @@ from lib.db_revision import DBRevision
|
|||||||
from lib.obs import OBS
|
from lib.obs import OBS
|
||||||
from lib.obs_revision import OBSRevision
|
from lib.obs_revision import OBSRevision
|
||||||
|
|
||||||
# needs to exist in local oscrc (little tricky)
|
|
||||||
API_URL = "https://api.opensuse.org"
|
|
||||||
|
|
||||||
|
|
||||||
class TestDBMethods(unittest.TestCase):
|
class TestDBMethods(unittest.TestCase):
|
||||||
def setUp(self):
|
def setUp(self):
|
||||||
self.db = DB(section="test")
|
self.db = DB(section="test")
|
||||||
self.obs = OBS(API_URL)
|
self.obs = OBS()
|
||||||
|
|
||||||
def test_import(self):
|
def test_import(self):
|
||||||
test_rev = OBSRevision(self.obs, "openSUSE:Factory", "xz")
|
test_rev = OBSRevision(self.obs, "openSUSE:Factory", "xz")
|
||||||
@ -33,7 +30,6 @@ class TestDBMethods(unittest.TestCase):
|
|||||||
db_rev = DBRevision.fetch_revision(
|
db_rev = DBRevision.fetch_revision(
|
||||||
self.db, project="openSUSE:Factory", package="xz", rev="70"
|
self.db, project="openSUSE:Factory", package="xz", rev="70"
|
||||||
)
|
)
|
||||||
self.assertEqual(db_rev.api_url, API_URL)
|
|
||||||
self.assertEqual(str(test_rev), str(db_rev))
|
self.assertEqual(str(test_rev), str(db_rev))
|
||||||
|
|
||||||
|
|
||||||
|
4528
tests/fixtures/FastCGI-data.yaml
vendored
4528
tests/fixtures/FastCGI-data.yaml
vendored
File diff suppressed because it is too large
Load Diff
33
tests/fixtures/FastCGI-expected-list.yaml
vendored
33
tests/fixtures/FastCGI-expected-list.yaml
vendored
@ -1,33 +0,0 @@
|
|||||||
- factory c:openSUSE:Factory/FastCGI/29.0 p1:openSUSE:Factory/FastCGI/28.0 p2:devel:libraries:c_c++/FastCGI/40.0
|
|
||||||
- devel c:devel:libraries:c_c++/FastCGI/40.0 p1:devel:libraries:c_c++/FastCGI/38.0
|
|
||||||
- factory c:openSUSE:Factory/FastCGI/28.0 p1:openSUSE:Factory/FastCGI/27.0 p2:devel:libraries:c_c++/FastCGI/38.0
|
|
||||||
- devel c:devel:libraries:c_c++/FastCGI/38.0 p1:devel:libraries:c_c++/FastCGI/37.0
|
|
||||||
- devel c:devel:libraries:c_c++/FastCGI/37.0 p1:devel:libraries:c_c++/FastCGI/36.0
|
|
||||||
- devel c:devel:libraries:c_c++/FastCGI/36.0 p1:devel:libraries:c_c++/FastCGI/34.0
|
|
||||||
- factory c:openSUSE:Factory/FastCGI/27.0 p1:openSUSE:Factory/FastCGI/26.0 p2:devel:libraries:c_c++/FastCGI/34.0
|
|
||||||
- devel c:devel:libraries:c_c++/FastCGI/34.0 p1:devel:libraries:c_c++/FastCGI/32.0
|
|
||||||
- factory c:openSUSE:Factory/FastCGI/26.0 p1:openSUSE:Factory/FastCGI/23.0 p2:devel:libraries:c_c++/FastCGI/32.0
|
|
||||||
- devel c:devel:libraries:c_c++/FastCGI/32.0 p1:devel:libraries:c_c++/FastCGI/30.0
|
|
||||||
- factory c:openSUSE:Factory/FastCGI/23.0 p1:openSUSE:Factory/FastCGI/20.0 p2:devel:libraries:c_c++/FastCGI/30.0
|
|
||||||
- devel c:devel:libraries:c_c++/FastCGI/30.0 p1:devel:libraries:c_c++/FastCGI/28.0
|
|
||||||
- factory c:openSUSE:Factory/FastCGI/20.0 p1:openSUSE:Factory/FastCGI/19.0 p2:devel:libraries:c_c++/FastCGI/28.0
|
|
||||||
- devel c:devel:libraries:c_c++/FastCGI/28.0 p1:devel:libraries:c_c++/FastCGI/26.0
|
|
||||||
- factory c:openSUSE:Factory/FastCGI/19.0 p1:openSUSE:Factory/FastCGI/18.0 p2:devel:libraries:c_c++/FastCGI/26.0
|
|
||||||
- devel c:devel:libraries:c_c++/FastCGI/26.0 p1:devel:libraries:c_c++/FastCGI/24.0
|
|
||||||
- factory c:openSUSE:Factory/FastCGI/18.0 p1:openSUSE:Factory/FastCGI/16.0 p2:devel:libraries:c_c++/FastCGI/24.0
|
|
||||||
- devel c:devel:libraries:c_c++/FastCGI/24.0 p1:devel:libraries:c_c++/FastCGI/22.0
|
|
||||||
- factory c:openSUSE:Factory/FastCGI/16.0 p1:openSUSE:Factory/FastCGI/15.0 p2:devel:libraries:c_c++/FastCGI/22.0
|
|
||||||
- devel c:devel:libraries:c_c++/FastCGI/22.0 p1:devel:libraries:c_c++/FastCGI/20.0
|
|
||||||
- factory c:openSUSE:Factory/FastCGI/15.0 p1:openSUSE:Factory/FastCGI/14.0 p2:devel:libraries:c_c++/FastCGI/20.0
|
|
||||||
- devel c:devel:libraries:c_c++/FastCGI/20.0 p1:devel:libraries:c_c++/FastCGI/19.014
|
|
||||||
- devel c:devel:libraries:c_c++/FastCGI/19.014 p1:devel:libraries:c_c++/FastCGI/18.0
|
|
||||||
- factory c:openSUSE:Factory/FastCGI/14.0 p1:openSUSE:Factory/FastCGI/13.0
|
|
||||||
- factory c:openSUSE:Factory/FastCGI/13.0 p1:openSUSE:Factory/FastCGI/11.0 p2:devel:libraries:c_c++/FastCGI/18.0
|
|
||||||
- devel c:devel:libraries:c_c++/FastCGI/18.0 p1:openSUSE:Factory/FastCGI/11.0
|
|
||||||
- factory c:openSUSE:Factory/FastCGI/11.0 p1:openSUSE:Factory/FastCGI/10.0
|
|
||||||
- factory c:openSUSE:Factory/FastCGI/10.0 p1:openSUSE:Factory/FastCGI/7.0
|
|
||||||
- factory c:openSUSE:Factory/FastCGI/7.0 p1:openSUSE:Factory/FastCGI/6.0
|
|
||||||
- factory c:openSUSE:Factory/FastCGI/6.0 p1:openSUSE:Factory/FastCGI/4.0
|
|
||||||
- factory c:openSUSE:Factory/FastCGI/4.0 p1:openSUSE:Factory/FastCGI/3.0
|
|
||||||
- factory c:openSUSE:Factory/FastCGI/3.0 p1:openSUSE:Factory/FastCGI/1.0
|
|
||||||
- factory c:openSUSE:Factory/FastCGI/1.0
|
|
44
tests/fixtures/FastCGI-expected-tree.yaml
vendored
44
tests/fixtures/FastCGI-expected-tree.yaml
vendored
@ -1,44 +0,0 @@
|
|||||||
- commit: openSUSE:Factory/FastCGI/29.0
|
|
||||||
merged:
|
|
||||||
- devel:libraries:c_c++/FastCGI/40.0
|
|
||||||
- commit: openSUSE:Factory/FastCGI/28.0
|
|
||||||
merged:
|
|
||||||
- devel:libraries:c_c++/FastCGI/38.0
|
|
||||||
- devel:libraries:c_c++/FastCGI/37.0
|
|
||||||
- devel:libraries:c_c++/FastCGI/36.0
|
|
||||||
- commit: openSUSE:Factory/FastCGI/27.0
|
|
||||||
merged:
|
|
||||||
- devel:libraries:c_c++/FastCGI/34.0
|
|
||||||
- commit: openSUSE:Factory/FastCGI/26.0
|
|
||||||
merged:
|
|
||||||
- devel:libraries:c_c++/FastCGI/32.0
|
|
||||||
- commit: openSUSE:Factory/FastCGI/23.0
|
|
||||||
merged:
|
|
||||||
- devel:libraries:c_c++/FastCGI/30.0
|
|
||||||
- commit: openSUSE:Factory/FastCGI/20.0
|
|
||||||
merged:
|
|
||||||
- devel:libraries:c_c++/FastCGI/28.0
|
|
||||||
- commit: openSUSE:Factory/FastCGI/19.0
|
|
||||||
merged:
|
|
||||||
- devel:libraries:c_c++/FastCGI/26.0
|
|
||||||
- commit: openSUSE:Factory/FastCGI/18.0
|
|
||||||
merged:
|
|
||||||
- devel:libraries:c_c++/FastCGI/24.0
|
|
||||||
- commit: openSUSE:Factory/FastCGI/16.0
|
|
||||||
merged:
|
|
||||||
- devel:libraries:c_c++/FastCGI/22.0
|
|
||||||
- commit: openSUSE:Factory/FastCGI/15.0
|
|
||||||
merged:
|
|
||||||
- devel:libraries:c_c++/FastCGI/20.0
|
|
||||||
- devel:libraries:c_c++/FastCGI/19.014
|
|
||||||
- commit: openSUSE:Factory/FastCGI/14.0
|
|
||||||
- commit: openSUSE:Factory/FastCGI/13.0
|
|
||||||
merged:
|
|
||||||
- devel:libraries:c_c++/FastCGI/18.0
|
|
||||||
- commit: openSUSE:Factory/FastCGI/11.0
|
|
||||||
- commit: openSUSE:Factory/FastCGI/10.0
|
|
||||||
- commit: openSUSE:Factory/FastCGI/7.0
|
|
||||||
- commit: openSUSE:Factory/FastCGI/6.0
|
|
||||||
- commit: openSUSE:Factory/FastCGI/4.0
|
|
||||||
- commit: openSUSE:Factory/FastCGI/3.0
|
|
||||||
- commit: openSUSE:Factory/FastCGI/1.0
|
|
9756
tests/fixtures/breeze-data.yaml
vendored
9756
tests/fixtures/breeze-data.yaml
vendored
File diff suppressed because it is too large
Load Diff
171
tests/fixtures/breeze-expected-list.yaml
vendored
171
tests/fixtures/breeze-expected-list.yaml
vendored
@ -1,171 +0,0 @@
|
|||||||
- factory c:openSUSE:Factory/breeze/43.0 p1:openSUSE:Factory/breeze/42.0 p2:KDE:Frameworks5/breeze/150.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/150.0 p1:KDE:Frameworks5/breeze/148.0
|
|
||||||
- factory c:openSUSE:Factory/breeze/42.0 p1:openSUSE:Factory/breeze/41.0 p2:KDE:Frameworks5/breeze/148.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/148.0 p1:KDE:Frameworks5/breeze/147.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/147.0 p1:KDE:Frameworks5/breeze/145.0
|
|
||||||
- factory c:openSUSE:Factory/breeze/41.0 p1:openSUSE:Factory/breeze/40.0 p2:KDE:Frameworks5/breeze/145.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/145.0 p1:KDE:Frameworks5/breeze/143.0
|
|
||||||
- factory c:openSUSE:Factory/breeze/40.0 p1:openSUSE:Factory/breeze/39.0 p2:KDE:Frameworks5/breeze/143.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/143.0 p1:KDE:Frameworks5/breeze/142.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/142.0 p1:KDE:Frameworks5/breeze/141.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/141.0 p1:KDE:Frameworks5/breeze/139.0
|
|
||||||
- factory c:openSUSE:Factory/breeze/39.0 p1:openSUSE:Factory/breeze/38.0 p2:KDE:Frameworks5/breeze/139.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/139.0 p1:KDE:Frameworks5/breeze/137.0
|
|
||||||
- factory c:openSUSE:Factory/breeze/38.0 p1:openSUSE:Factory/breeze/37.0 p2:KDE:Frameworks5/breeze/137.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/137.0 p1:KDE:Frameworks5/breeze/136.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/136.0 p1:KDE:Frameworks5/breeze/135.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/135.0 p1:KDE:Frameworks5/breeze/134.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/134.0 p1:KDE:Frameworks5/breeze/132.0
|
|
||||||
- factory c:openSUSE:Factory/breeze/37.0 p1:openSUSE:Factory/breeze/36.0 p2:KDE:Frameworks5/breeze/132.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/132.0 p1:KDE:Frameworks5/breeze/130.0
|
|
||||||
- factory c:openSUSE:Factory/breeze/36.0 p1:openSUSE:Factory/breeze/35.0 p2:KDE:Frameworks5/breeze/130.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/130.0 p1:KDE:Frameworks5/breeze/128.0
|
|
||||||
- factory c:openSUSE:Factory/breeze/35.0 p1:openSUSE:Factory/breeze/34.0 p2:KDE:Frameworks5/breeze/128.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/128.0 p1:KDE:Frameworks5/breeze/127.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/127.0 p1:KDE:Frameworks5/breeze/126.034
|
|
||||||
- devel c:KDE:Frameworks5/breeze/126.034 p1:KDE:Frameworks5/breeze/126.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/126.0 p1:KDE:Frameworks5/breeze/125.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/125.0 p1:KDE:Frameworks5/breeze/124.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/124.0 p1:KDE:Frameworks5/breeze/123.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/123.0 p1:KDE:Frameworks5/breeze/122.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/122.0 p1:KDE:Frameworks5/breeze/120.0
|
|
||||||
- factory c:openSUSE:Factory/breeze/34.0 p1:openSUSE:Factory/breeze/33.0 p2:KDE:Frameworks5:LTS/breeze/14.0
|
|
||||||
- devel c:KDE:Frameworks5:LTS/breeze/14.0 p1:KDE:Frameworks5:LTS/breeze/13.0
|
|
||||||
- devel c:KDE:Frameworks5:LTS/breeze/13.0 p1:KDE:Frameworks5:LTS/breeze/12.0
|
|
||||||
- devel c:KDE:Frameworks5:LTS/breeze/12.0 p1:KDE:Frameworks5:LTS/breeze/11.0
|
|
||||||
- factory c:openSUSE:Factory/breeze/33.0 p1:openSUSE:Factory/breeze/32.0 p2:KDE:Frameworks5:LTS/breeze/11.0
|
|
||||||
- devel c:KDE:Frameworks5:LTS/breeze/11.0 p1:KDE:Frameworks5:LTS/breeze/10.0
|
|
||||||
- devel c:KDE:Frameworks5:LTS/breeze/10.0 p1:KDE:Frameworks5:LTS/breeze/9.0
|
|
||||||
- devel c:KDE:Frameworks5:LTS/breeze/9.0 p1:KDE:Frameworks5:LTS/breeze/8.0
|
|
||||||
- devel c:KDE:Frameworks5:LTS/breeze/8.0 p1:KDE:Frameworks5:LTS/breeze/7.0
|
|
||||||
- devel c:KDE:Frameworks5:LTS/breeze/7.0 p1:KDE:Frameworks5:LTS/breeze/6.0
|
|
||||||
- devel c:KDE:Frameworks5:LTS/breeze/6.0 p1:KDE:Frameworks5:LTS/breeze/5.0
|
|
||||||
- devel c:KDE:Frameworks5:LTS/breeze/5.0 p1:KDE:Frameworks5:LTS/breeze/4.0
|
|
||||||
- devel c:KDE:Frameworks5:LTS/breeze/4.0 p1:KDE:Frameworks5:LTS/breeze/3.0
|
|
||||||
- devel c:KDE:Frameworks5:LTS/breeze/3.0 p1:KDE:Frameworks5:LTS/breeze/2.0
|
|
||||||
- devel c:KDE:Frameworks5:LTS/breeze/2.0 p1:KDE:Frameworks5:LTS/breeze/1.0
|
|
||||||
- devel c:KDE:Frameworks5:LTS/breeze/1.0 p1:openSUSE:Factory/breeze/32.0
|
|
||||||
- factory c:openSUSE:Factory/breeze/32.0 p1:openSUSE:Factory/breeze/31.0 p2:KDE:Frameworks5/breeze/120.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/120.0 p1:KDE:Frameworks5/breeze/117.0
|
|
||||||
- factory c:openSUSE:Factory/breeze/31.0 p1:openSUSE:Factory/breeze/30.0 p2:KDE:Frameworks5/breeze/117.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/117.0 p1:KDE:Frameworks5/breeze/116.0
|
|
||||||
- factory c:openSUSE:Factory/breeze/30.0 p1:openSUSE:Factory/breeze/29.0 p2:KDE:Frameworks5/breeze/116.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/116.0 p1:KDE:Frameworks5/breeze/115.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/115.0 p1:KDE:Frameworks5/breeze/113.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/113.0 p1:KDE:Frameworks5/breeze/112.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/112.0 p1:KDE:Frameworks5/breeze/111.0
|
|
||||||
- factory c:openSUSE:Factory/breeze/29.0 p1:openSUSE:Factory/breeze/28.0 p2:KDE:Frameworks5/breeze/111.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/111.0 p1:KDE:Frameworks5/breeze/110.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/110.0 p1:KDE:Frameworks5/breeze/109.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/109.0 p1:KDE:Frameworks5/breeze/108.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/108.0 p1:KDE:Frameworks5/breeze/107.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/107.0 p1:KDE:Frameworks5/breeze/105.0
|
|
||||||
- factory c:openSUSE:Factory/breeze/28.0 p1:openSUSE:Factory/breeze/27.0 p2:KDE:Frameworks5/breeze/105.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/105.0 p1:KDE:Frameworks5/breeze/103.0
|
|
||||||
- factory c:openSUSE:Factory/breeze/27.0 p1:openSUSE:Factory/breeze/26.0 p2:KDE:Frameworks5/breeze/103.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/103.0 p1:KDE:Frameworks5/breeze/100.0
|
|
||||||
- factory c:openSUSE:Factory/breeze/26.0 p1:openSUSE:Factory/breeze/25.0 p2:KDE:Frameworks5/breeze/100.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/100.0 p1:KDE:Frameworks5/breeze/99.0
|
|
||||||
- factory c:openSUSE:Factory/breeze/25.0 p1:openSUSE:Factory/breeze/24.0 p2:KDE:Frameworks5/breeze/99.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/99.0 p1:KDE:Frameworks5/breeze/98.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/98.0 p1:KDE:Frameworks5/breeze/97.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/97.0 p1:KDE:Frameworks5/breeze/95.0
|
|
||||||
- factory c:openSUSE:Factory/breeze/24.0 p1:openSUSE:Factory/breeze/23.0 p2:KDE:Frameworks5/breeze/95.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/95.0 p1:KDE:Frameworks5/breeze/93.0
|
|
||||||
- factory c:openSUSE:Factory/breeze/23.0 p1:openSUSE:Factory/breeze/22.0 p2:KDE:Frameworks5/breeze/93.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/93.0 p1:KDE:Frameworks5/breeze/91.0
|
|
||||||
- factory c:openSUSE:Factory/breeze/22.0 p1:openSUSE:Factory/breeze/21.0 p2:KDE:Frameworks5/breeze/91.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/91.0 p1:KDE:Frameworks5/breeze/88.0
|
|
||||||
- factory c:openSUSE:Factory/breeze/21.0 p1:openSUSE:Factory/breeze/20.0 p2:KDE:Frameworks5/breeze/88.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/88.0 p1:KDE:Frameworks5/breeze/87.0
|
|
||||||
- factory c:openSUSE:Factory/breeze/20.0 p1:openSUSE:Factory/breeze/19.0 p2:KDE:Frameworks5/breeze/87.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/87.0 p1:KDE:Frameworks5/breeze/86.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/86.0 p1:KDE:Frameworks5/breeze/85.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/85.0 p1:KDE:Frameworks5/breeze/84.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/84.0 p1:KDE:Frameworks5/breeze/83.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/83.0 p1:KDE:Frameworks5/breeze/82.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/82.0 p1:KDE:Frameworks5/breeze/81.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/81.0 p1:KDE:Frameworks5/breeze/80.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/80.0 p1:KDE:Frameworks5/breeze/79.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/79.0 p1:KDE:Frameworks5/breeze/78.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/78.0 p1:KDE:Frameworks5/breeze/76.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/76.0 p1:KDE:Frameworks5/breeze/75.0
|
|
||||||
- factory c:openSUSE:Factory/breeze/19.0 p1:openSUSE:Factory/breeze/18.0 p2:KDE:Frameworks5/breeze/75.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/75.0 p1:KDE:Frameworks5/breeze/74.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/74.0 p1:KDE:Frameworks5/breeze/73.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/73.0 p1:KDE:Frameworks5/breeze/71.0
|
|
||||||
- factory c:openSUSE:Factory/breeze/18.0 p1:openSUSE:Factory/breeze/17.0 p2:KDE:Frameworks5/breeze/71.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/71.0 p1:KDE:Frameworks5/breeze/70.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/70.0 p1:KDE:Frameworks5/breeze/69.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/69.0 p1:KDE:Frameworks5/breeze/68.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/68.0 p1:KDE:Frameworks5/breeze/67.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/67.0 p1:KDE:Frameworks5/breeze/65.0
|
|
||||||
- factory c:openSUSE:Factory/breeze/17.0 p1:openSUSE:Factory/breeze/16.0 p2:KDE:Frameworks5/breeze/65.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/65.0 p1:KDE:Frameworks5/breeze/64.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/64.0 p1:KDE:Frameworks5/breeze/62.0
|
|
||||||
- factory c:openSUSE:Factory/breeze/16.0 p1:openSUSE:Factory/breeze/15.0 p2:KDE:Frameworks5/breeze/62.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/62.0 p1:KDE:Frameworks5/breeze/61.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/61.0 p1:KDE:Frameworks5/breeze/60.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/60.0 p1:KDE:Frameworks5/breeze/59.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/59.0 p1:KDE:Frameworks5/breeze/58.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/58.0 p1:KDE:Frameworks5/breeze/57.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/57.0 p1:KDE:Frameworks5/breeze/55.0
|
|
||||||
- factory c:openSUSE:Factory/breeze/15.0 p1:openSUSE:Factory/breeze/14.0 p2:KDE:Frameworks5/breeze/55.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/55.0 p1:KDE:Frameworks5/breeze/53.0
|
|
||||||
- factory c:openSUSE:Factory/breeze/14.0 p1:openSUSE:Factory/breeze/13.0 p2:KDE:Frameworks5/breeze/53.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/53.0 p1:KDE:Frameworks5/breeze/51.0
|
|
||||||
- factory c:openSUSE:Factory/breeze/13.0 p1:openSUSE:Factory/breeze/12.0 p2:KDE:Frameworks5/breeze/51.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/51.0 p1:KDE:Frameworks5/breeze/50.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/50.0 p1:KDE:Frameworks5/breeze/49.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/49.0 p1:KDE:Frameworks5/breeze/48.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/48.0 p1:KDE:Frameworks5/breeze/47.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/47.0 p1:KDE:Frameworks5/breeze/46.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/46.0 p1:KDE:Frameworks5/breeze/45.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/45.0 p1:KDE:Frameworks5/breeze/44.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/44.0 p1:KDE:Frameworks5/breeze/43.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/43.0 p1:KDE:Frameworks5/breeze/41.0
|
|
||||||
- factory c:openSUSE:Factory/breeze/12.0 p1:openSUSE:Factory/breeze/11.0 p2:KDE:Frameworks5/breeze/41.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/41.0 p1:KDE:Frameworks5/breeze/40.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/40.0 p1:KDE:Frameworks5/breeze/39.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/39.0 p1:KDE:Frameworks5/breeze/38.0
|
|
||||||
- factory c:openSUSE:Factory/breeze/11.0 p1:openSUSE:Factory/breeze/10.0 p2:KDE:Frameworks5/breeze/38.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/38.0 p1:KDE:Frameworks5/breeze/36.0
|
|
||||||
- factory c:openSUSE:Factory/breeze/10.0 p1:openSUSE:Factory/breeze/9.0 p2:KDE:Frameworks5/breeze/36.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/36.0 p1:KDE:Frameworks5/breeze/35.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/35.0 p1:KDE:Frameworks5/breeze/33.0
|
|
||||||
- factory c:openSUSE:Factory/breeze/9.0 p1:openSUSE:Factory/breeze/8.0 p2:KDE:Frameworks5/breeze/33.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/33.0 p1:KDE:Frameworks5/breeze/32.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/32.0 p1:KDE:Frameworks5/breeze/31.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/31.0 p1:KDE:Frameworks5/breeze/30.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/30.0 p1:KDE:Frameworks5/breeze/28.0
|
|
||||||
- factory c:openSUSE:Factory/breeze/8.0 p1:openSUSE:Factory/breeze/7.0 p2:KDE:Frameworks5/breeze/28.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/28.0 p1:KDE:Frameworks5/breeze/27.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/27.0 p1:KDE:Frameworks5/breeze/25.0
|
|
||||||
- factory c:openSUSE:Factory/breeze/7.0 p1:openSUSE:Factory/breeze/6.0 p2:KDE:Frameworks5/breeze/25.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/25.0 p1:KDE:Frameworks5/breeze/24.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/24.0 p1:KDE:Frameworks5/breeze/22.0
|
|
||||||
- factory c:openSUSE:Factory/breeze/6.0 p1:openSUSE:Factory/breeze/5.0 p2:KDE:Frameworks5/breeze/22.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/22.0 p1:KDE:Frameworks5/breeze/21.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/21.0 p1:KDE:Frameworks5/breeze/20.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/20.0 p1:KDE:Frameworks5/breeze/19.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/19.0 p1:KDE:Frameworks5/breeze/18.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/18.0 p1:KDE:Frameworks5/breeze/17.0
|
|
||||||
- factory c:openSUSE:Factory/breeze/5.0 p1:openSUSE:Factory/breeze/4.0 p2:KDE:Frameworks5/breeze/17.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/17.0 p1:KDE:Frameworks5/breeze/16.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/16.0 p1:KDE:Frameworks5/breeze/15.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/15.0 p1:KDE:Frameworks5/breeze/14.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/14.0 p1:KDE:Frameworks5/breeze/13.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/13.0 p1:KDE:Frameworks5/breeze/12.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/12.0 p1:KDE:Frameworks5/breeze/11.0
|
|
||||||
- factory c:openSUSE:Factory/breeze/4.0 p1:openSUSE:Factory/breeze/2.0 p2:KDE:Frameworks5/breeze/11.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/11.0 p1:KDE:Frameworks5/breeze/10.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/10.0 p1:KDE:Frameworks5/breeze/9.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/9.0 p1:KDE:Frameworks5/breeze/8.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/8.0 p1:KDE:Frameworks5/breeze/6.0
|
|
||||||
- factory c:openSUSE:Factory/breeze/2.0 p1:openSUSE:Factory/breeze/1.0 p2:KDE:Frameworks5/breeze/6.0
|
|
||||||
- devel c:KDE:Frameworks5/breeze/6.0 p1:openSUSE:Factory/breeze/1.0
|
|
||||||
- factory c:openSUSE:Factory/breeze/1.0 p1:KDE:Frameworks5/breeze/4.0
|
|
||||||
- factory c:KDE:Frameworks5/breeze/4.0 p1:KDE:Frameworks5/breeze/3.0
|
|
||||||
- factory c:KDE:Frameworks5/breeze/3.0 p1:KDE:Frameworks5/breeze/2.0
|
|
||||||
- factory c:KDE:Frameworks5/breeze/2.0 p1:KDE:Frameworks5/breeze/1.0
|
|
||||||
- factory c:KDE:Frameworks5/breeze/1.0
|
|
212
tests/fixtures/breeze-expected-tree.yaml
vendored
212
tests/fixtures/breeze-expected-tree.yaml
vendored
@ -1,212 +0,0 @@
- commit: openSUSE:Factory/breeze/43.0
  merged:
    - KDE:Frameworks5/breeze/150.0
- commit: openSUSE:Factory/breeze/42.0
  merged:
    - KDE:Frameworks5/breeze/148.0
    - KDE:Frameworks5/breeze/147.0
- commit: openSUSE:Factory/breeze/41.0
  merged:
    - KDE:Frameworks5/breeze/145.0
- commit: openSUSE:Factory/breeze/40.0
  merged:
    - KDE:Frameworks5/breeze/143.0
    - KDE:Frameworks5/breeze/142.0
    - KDE:Frameworks5/breeze/141.0
- commit: openSUSE:Factory/breeze/39.0
  merged:
    - KDE:Frameworks5/breeze/139.0
- commit: openSUSE:Factory/breeze/38.0
  merged:
    - KDE:Frameworks5/breeze/137.0
    - KDE:Frameworks5/breeze/136.0
    - KDE:Frameworks5/breeze/135.0
    - KDE:Frameworks5/breeze/134.0
- commit: openSUSE:Factory/breeze/37.0
  merged:
    - KDE:Frameworks5/breeze/132.0
- commit: openSUSE:Factory/breeze/36.0
  merged:
    - KDE:Frameworks5/breeze/130.0
- commit: openSUSE:Factory/breeze/35.0
  merged:
    - KDE:Frameworks5/breeze/128.0
    - KDE:Frameworks5/breeze/127.0
    - KDE:Frameworks5/breeze/126.034
    - KDE:Frameworks5/breeze/126.0
    - KDE:Frameworks5/breeze/125.0
    - KDE:Frameworks5/breeze/124.0
    - KDE:Frameworks5/breeze/123.0
    - KDE:Frameworks5/breeze/122.0
- commit: openSUSE:Factory/breeze/34.0
  merged:
    - KDE:Frameworks5:LTS/breeze/14.0
    - KDE:Frameworks5:LTS/breeze/13.0
    - KDE:Frameworks5:LTS/breeze/12.0
- commit: openSUSE:Factory/breeze/33.0
  merged:
    - KDE:Frameworks5:LTS/breeze/11.0
    - KDE:Frameworks5:LTS/breeze/10.0
    - KDE:Frameworks5:LTS/breeze/9.0
    - KDE:Frameworks5:LTS/breeze/8.0
    - KDE:Frameworks5:LTS/breeze/7.0
    - KDE:Frameworks5:LTS/breeze/6.0
    - KDE:Frameworks5:LTS/breeze/5.0
    - KDE:Frameworks5:LTS/breeze/4.0
    - KDE:Frameworks5:LTS/breeze/3.0
    - KDE:Frameworks5:LTS/breeze/2.0
    - KDE:Frameworks5:LTS/breeze/1.0
- commit: openSUSE:Factory/breeze/32.0
  merged:
    - KDE:Frameworks5/breeze/120.0
- commit: openSUSE:Factory/breeze/31.0
  merged:
    - KDE:Frameworks5/breeze/117.0
- commit: openSUSE:Factory/breeze/30.0
  merged:
    - KDE:Frameworks5/breeze/116.0
    - KDE:Frameworks5/breeze/115.0
    - KDE:Frameworks5/breeze/113.0
    - KDE:Frameworks5/breeze/112.0
- commit: openSUSE:Factory/breeze/29.0
  merged:
    - KDE:Frameworks5/breeze/111.0
    - KDE:Frameworks5/breeze/110.0
    - KDE:Frameworks5/breeze/109.0
    - KDE:Frameworks5/breeze/108.0
    - KDE:Frameworks5/breeze/107.0
- commit: openSUSE:Factory/breeze/28.0
  merged:
    - KDE:Frameworks5/breeze/105.0
- commit: openSUSE:Factory/breeze/27.0
  merged:
    - KDE:Frameworks5/breeze/103.0
- commit: openSUSE:Factory/breeze/26.0
  merged:
    - KDE:Frameworks5/breeze/100.0
- commit: openSUSE:Factory/breeze/25.0
  merged:
    - KDE:Frameworks5/breeze/99.0
    - KDE:Frameworks5/breeze/98.0
    - KDE:Frameworks5/breeze/97.0
- commit: openSUSE:Factory/breeze/24.0
  merged:
    - KDE:Frameworks5/breeze/95.0
- commit: openSUSE:Factory/breeze/23.0
  merged:
    - KDE:Frameworks5/breeze/93.0
- commit: openSUSE:Factory/breeze/22.0
  merged:
    - KDE:Frameworks5/breeze/91.0
- commit: openSUSE:Factory/breeze/21.0
  merged:
    - KDE:Frameworks5/breeze/88.0
- commit: openSUSE:Factory/breeze/20.0
  merged:
    - KDE:Frameworks5/breeze/87.0
    - KDE:Frameworks5/breeze/86.0
    - KDE:Frameworks5/breeze/85.0
    - KDE:Frameworks5/breeze/84.0
    - KDE:Frameworks5/breeze/83.0
    - KDE:Frameworks5/breeze/82.0
    - KDE:Frameworks5/breeze/81.0
    - KDE:Frameworks5/breeze/80.0
    - KDE:Frameworks5/breeze/79.0
    - KDE:Frameworks5/breeze/78.0
    - KDE:Frameworks5/breeze/76.0
- commit: openSUSE:Factory/breeze/19.0
  merged:
    - KDE:Frameworks5/breeze/75.0
    - KDE:Frameworks5/breeze/74.0
    - KDE:Frameworks5/breeze/73.0
- commit: openSUSE:Factory/breeze/18.0
  merged:
    - KDE:Frameworks5/breeze/71.0
    - KDE:Frameworks5/breeze/70.0
    - KDE:Frameworks5/breeze/69.0
    - KDE:Frameworks5/breeze/68.0
    - KDE:Frameworks5/breeze/67.0
- commit: openSUSE:Factory/breeze/17.0
  merged:
    - KDE:Frameworks5/breeze/65.0
    - KDE:Frameworks5/breeze/64.0
- commit: openSUSE:Factory/breeze/16.0
  merged:
    - KDE:Frameworks5/breeze/62.0
    - KDE:Frameworks5/breeze/61.0
    - KDE:Frameworks5/breeze/60.0
    - KDE:Frameworks5/breeze/59.0
    - KDE:Frameworks5/breeze/58.0
    - KDE:Frameworks5/breeze/57.0
- commit: openSUSE:Factory/breeze/15.0
  merged:
    - KDE:Frameworks5/breeze/55.0
- commit: openSUSE:Factory/breeze/14.0
  merged:
    - KDE:Frameworks5/breeze/53.0
- commit: openSUSE:Factory/breeze/13.0
  merged:
    - KDE:Frameworks5/breeze/51.0
    - KDE:Frameworks5/breeze/50.0
    - KDE:Frameworks5/breeze/49.0
    - KDE:Frameworks5/breeze/48.0
    - KDE:Frameworks5/breeze/47.0
    - KDE:Frameworks5/breeze/46.0
    - KDE:Frameworks5/breeze/45.0
    - KDE:Frameworks5/breeze/44.0
    - KDE:Frameworks5/breeze/43.0
- commit: openSUSE:Factory/breeze/12.0
  merged:
    - KDE:Frameworks5/breeze/41.0
    - KDE:Frameworks5/breeze/40.0
    - KDE:Frameworks5/breeze/39.0
- commit: openSUSE:Factory/breeze/11.0
  merged:
    - KDE:Frameworks5/breeze/38.0
- commit: openSUSE:Factory/breeze/10.0
  merged:
    - KDE:Frameworks5/breeze/36.0
    - KDE:Frameworks5/breeze/35.0
- commit: openSUSE:Factory/breeze/9.0
  merged:
    - KDE:Frameworks5/breeze/33.0
    - KDE:Frameworks5/breeze/32.0
    - KDE:Frameworks5/breeze/31.0
    - KDE:Frameworks5/breeze/30.0
- commit: openSUSE:Factory/breeze/8.0
  merged:
    - KDE:Frameworks5/breeze/28.0
    - KDE:Frameworks5/breeze/27.0
- commit: openSUSE:Factory/breeze/7.0
  merged:
    - KDE:Frameworks5/breeze/25.0
    - KDE:Frameworks5/breeze/24.0
- commit: openSUSE:Factory/breeze/6.0
  merged:
    - KDE:Frameworks5/breeze/22.0
    - KDE:Frameworks5/breeze/21.0
    - KDE:Frameworks5/breeze/20.0
    - KDE:Frameworks5/breeze/19.0
    - KDE:Frameworks5/breeze/18.0
- commit: openSUSE:Factory/breeze/5.0
  merged:
    - KDE:Frameworks5/breeze/17.0
    - KDE:Frameworks5/breeze/16.0
    - KDE:Frameworks5/breeze/15.0
    - KDE:Frameworks5/breeze/14.0
    - KDE:Frameworks5/breeze/13.0
    - KDE:Frameworks5/breeze/12.0
- commit: openSUSE:Factory/breeze/4.0
  merged:
    - KDE:Frameworks5/breeze/11.0
    - KDE:Frameworks5/breeze/10.0
    - KDE:Frameworks5/breeze/9.0
    - KDE:Frameworks5/breeze/8.0
- commit: openSUSE:Factory/breeze/2.0
  merged:
    - KDE:Frameworks5/breeze/6.0
- commit: openSUSE:Factory/breeze/1.0
- commit: KDE:Frameworks5/breeze/4.0
- commit: KDE:Frameworks5/breeze/3.0
- commit: KDE:Frameworks5/breeze/2.0
- commit: KDE:Frameworks5/breeze/1.0
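The deleted *-expected-tree.yaml fixtures above are plain YAML: a list of factory commits, each optionally carrying a merged: list with the devel submissions folded into that commit. A short sketch of walking such a fixture follows; it assumes PyYAML and a local copy of the file, and the printed summary is purely illustrative.

# Minimal sketch: load an expected-tree fixture and summarise each entry.
# Assumes PyYAML is installed and a copy of the fixture is available locally.
import yaml

with open("tests/fixtures/breeze-expected-tree.yaml") as fh:
    tree = yaml.safe_load(fh)

for entry in tree:
    merged = entry.get("merged", [])
    print(f"{entry['commit']}: {len(merged)} merged devel revision(s)")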
9551 tests/fixtures/firewalld-data.yaml vendored
File diff suppressed because it is too large
173 tests/fixtures/firewalld-expected-list.yaml vendored
@@ -1,173 +0,0 @@
- factory c:openSUSE:Factory/firewalld/73.0 p1:openSUSE:Factory/firewalld/72.0 p2:security:netfilter/firewalld/131.0
- devel c:security:netfilter/firewalld/131.0 p1:security:netfilter/firewalld/129.0
- factory c:openSUSE:Factory/firewalld/72.0 p1:openSUSE:Factory/firewalld/71.0 p2:security:netfilter/firewalld/129.0
- devel c:security:netfilter/firewalld/129.0 p1:security:netfilter/firewalld/128.0
- devel c:security:netfilter/firewalld/128.0 p1:security:netfilter/firewalld/127.0
- factory c:openSUSE:Factory/firewalld/71.0 p1:openSUSE:Factory/firewalld/70.0 p2:security:netfilter/firewalld/127.0
- devel c:security:netfilter/firewalld/127.0 p1:security:netfilter/firewalld/126.0
- factory c:openSUSE:Factory/firewalld/70.0 p1:openSUSE:Factory/firewalld/69.0
- factory c:openSUSE:Factory/firewalld/69.0 p1:openSUSE:Factory/firewalld/68.0 p2:security:netfilter/firewalld/126.0
- devel c:security:netfilter/firewalld/126.0 p1:security:netfilter/firewalld/125.0
- factory c:openSUSE:Factory/firewalld/68.0 p1:openSUSE:Factory/firewalld/67.0 p2:security:netfilter/firewalld/125.0
- devel c:security:netfilter/firewalld/125.0 p1:security:netfilter/firewalld/124.0
- factory c:openSUSE:Factory/firewalld/67.0 p1:openSUSE:Factory/firewalld/66.0 p2:security:netfilter/firewalld/124.0
- devel c:security:netfilter/firewalld/124.0 p1:security:netfilter/firewalld/123.0
- factory c:openSUSE:Factory/firewalld/66.0 p1:openSUSE:Factory/firewalld/65.0 p2:security:netfilter/firewalld/123.0
- devel c:security:netfilter/firewalld/123.0 p1:security:netfilter/firewalld/122.0
- factory c:openSUSE:Factory/firewalld/65.0 p1:openSUSE:Factory/firewalld/64.0 p2:security:netfilter/firewalld/122.0
- devel c:security:netfilter/firewalld/122.0 p1:security:netfilter/firewalld/121.0
- factory c:openSUSE:Factory/firewalld/64.0 p1:openSUSE:Factory/firewalld/63.0 p2:security:netfilter/firewalld/121.0
- devel c:security:netfilter/firewalld/121.0 p1:security:netfilter/firewalld/120.0
- factory c:openSUSE:Factory/firewalld/63.0 p1:openSUSE:Factory/firewalld/62.0 p2:security:netfilter/firewalld/120.0
- devel c:security:netfilter/firewalld/120.0 p1:security:netfilter/firewalld/119.0
- factory c:openSUSE:Factory/firewalld/62.0 p1:openSUSE:Factory/firewalld/61.0 p2:security:netfilter/firewalld/119.0
- devel c:security:netfilter/firewalld/119.0 p1:security:netfilter/firewalld/118.0
- factory c:openSUSE:Factory/firewalld/61.0 p1:openSUSE:Factory/firewalld/60.0 p2:security:netfilter/firewalld/118.0
- devel c:security:netfilter/firewalld/118.0 p1:security:netfilter/firewalld/117.0
- factory c:openSUSE:Factory/firewalld/60.0 p1:openSUSE:Factory/firewalld/59.0 p2:security:netfilter/firewalld/117.0
- devel c:security:netfilter/firewalld/117.0 p1:security:netfilter/firewalld/116.0
- factory c:openSUSE:Factory/firewalld/59.0 p1:openSUSE:Factory/firewalld/58.0 p2:security:netfilter/firewalld/116.0
- devel c:security:netfilter/firewalld/116.0 p1:security:netfilter/firewalld/115.0
- factory c:openSUSE:Factory/firewalld/58.0 p1:openSUSE:Factory/firewalld/57.0 p2:security:netfilter/firewalld/115.0
- devel c:security:netfilter/firewalld/115.0 p1:security:netfilter/firewalld/114.0
- factory c:openSUSE:Factory/firewalld/57.0 p1:openSUSE:Factory/firewalld/56.0 p2:security:netfilter/firewalld/114.0
- devel c:security:netfilter/firewalld/114.0 p1:security:netfilter/firewalld/113.0
- factory c:openSUSE:Factory/firewalld/56.0 p1:openSUSE:Factory/firewalld/55.0 p2:security:netfilter/firewalld/113.0
- devel c:security:netfilter/firewalld/113.0 p1:security:netfilter/firewalld/112.0
- factory c:openSUSE:Factory/firewalld/55.0 p1:openSUSE:Factory/firewalld/54.0 p2:security:netfilter/firewalld/112.0
- devel c:security:netfilter/firewalld/112.0 p1:security:netfilter/firewalld/111.0
- devel c:security:netfilter/firewalld/111.0 p1:security:netfilter/firewalld/110.0
- devel c:security:netfilter/firewalld/110.0 p1:security:netfilter/firewalld/109.0
- devel c:security:netfilter/firewalld/109.0 p1:security:netfilter/firewalld/108.0
- factory c:openSUSE:Factory/firewalld/54.0 p1:openSUSE:Factory/firewalld/53.0 p2:security:netfilter/firewalld/108.0
- devel c:security:netfilter/firewalld/108.0 p1:security:netfilter/firewalld/107.0
- factory c:openSUSE:Factory/firewalld/53.0 p1:openSUSE:Factory/firewalld/52.0 p2:security:netfilter/firewalld/107.0
- devel c:security:netfilter/firewalld/107.0 p1:security:netfilter/firewalld/106.0
- factory c:openSUSE:Factory/firewalld/52.0 p1:openSUSE:Factory/firewalld/51.0
- factory c:openSUSE:Factory/firewalld/51.0 p1:openSUSE:Factory/firewalld/50.0
- factory c:openSUSE:Factory/firewalld/50.0 p1:openSUSE:Factory/firewalld/49.0 p2:security:netfilter/firewalld/106.0
- devel c:security:netfilter/firewalld/106.0 p1:security:netfilter/firewalld/105.0
- factory c:openSUSE:Factory/firewalld/49.0 p1:openSUSE:Factory/firewalld/48.0 p2:security:netfilter/firewalld/105.0
- devel c:security:netfilter/firewalld/105.0 p1:security:netfilter/firewalld/104.0
- devel c:security:netfilter/firewalld/104.0 p1:security:netfilter/firewalld/103.0
- devel c:security:netfilter/firewalld/103.0 p1:security:netfilter/firewalld/102.0
- factory c:openSUSE:Factory/firewalld/48.0 p1:openSUSE:Factory/firewalld/47.0 p2:security:netfilter/firewalld/102.0
- devel c:security:netfilter/firewalld/102.0 p1:security:netfilter/firewalld/101.0
- factory c:openSUSE:Factory/firewalld/47.0 p1:openSUSE:Factory/firewalld/46.0 p2:security:netfilter/firewalld/101.0
- devel c:security:netfilter/firewalld/101.0 p1:security:netfilter/firewalld/100.0
- factory c:openSUSE:Factory/firewalld/46.0 p1:openSUSE:Factory/firewalld/45.0 p2:security:netfilter/firewalld/100.0
- devel c:security:netfilter/firewalld/100.0 p1:security:netfilter/firewalld/99.0
- factory c:openSUSE:Factory/firewalld/45.0 p1:openSUSE:Factory/firewalld/44.0 p2:security:netfilter/firewalld/99.0
- devel c:security:netfilter/firewalld/99.0 p1:security:netfilter/firewalld/98.0
- factory c:openSUSE:Factory/firewalld/44.0 p1:openSUSE:Factory/firewalld/43.0 p2:security:netfilter/firewalld/98.0
- devel c:security:netfilter/firewalld/98.0 p1:security:netfilter/firewalld/97.0
- factory c:openSUSE:Factory/firewalld/43.0 p1:openSUSE:Factory/firewalld/42.0 p2:security:netfilter/firewalld/97.0
- devel c:security:netfilter/firewalld/97.0 p1:security:netfilter/firewalld/96.0
- devel c:security:netfilter/firewalld/96.0 p1:security:netfilter/firewalld/95.0
- devel c:security:netfilter/firewalld/95.0 p1:security:netfilter/firewalld/94.0
- devel c:security:netfilter/firewalld/94.0 p1:security:netfilter/firewalld/93.0
- factory c:openSUSE:Factory/firewalld/42.0 p1:openSUSE:Factory/firewalld/41.0 p2:security:netfilter/firewalld/93.0
- devel c:security:netfilter/firewalld/93.0 p1:security:netfilter/firewalld/92.0
- factory c:openSUSE:Factory/firewalld/41.0 p1:openSUSE:Factory/firewalld/40.0 p2:security:netfilter/firewalld/92.0
- devel c:security:netfilter/firewalld/92.0 p1:security:netfilter/firewalld/91.0
- factory c:openSUSE:Factory/firewalld/40.0 p1:openSUSE:Factory/firewalld/39.0 p2:security:netfilter/firewalld/91.0
- devel c:security:netfilter/firewalld/91.0 p1:security:netfilter/firewalld/90.0
- factory c:openSUSE:Factory/firewalld/39.0 p1:openSUSE:Factory/firewalld/38.0 p2:security:netfilter/firewalld/90.0
- devel c:security:netfilter/firewalld/90.0 p1:security:netfilter/firewalld/89.0
- factory c:openSUSE:Factory/firewalld/38.0 p1:openSUSE:Factory/firewalld/37.0 p2:security:netfilter/firewalld/89.0
- devel c:security:netfilter/firewalld/89.0 p1:security:netfilter/firewalld/88.0
- factory c:openSUSE:Factory/firewalld/37.0 p1:openSUSE:Factory/firewalld/36.0 p2:security:netfilter/firewalld/88.0
- devel c:security:netfilter/firewalld/88.0 p1:security:netfilter/firewalld/87.0
- devel c:security:netfilter/firewalld/87.0 p1:security:netfilter/firewalld/86.0
- devel c:security:netfilter/firewalld/86.0 p1:security:netfilter/firewalld/85.0
- devel c:security:netfilter/firewalld/85.0 p1:security:netfilter/firewalld/84.0
- factory c:openSUSE:Factory/firewalld/36.0 p1:openSUSE:Factory/firewalld/35.0 p2:security:netfilter/firewalld/84.0
- devel c:security:netfilter/firewalld/84.0 p1:security:netfilter/firewalld/83.0
- devel c:security:netfilter/firewalld/83.0 p1:security:netfilter/firewalld/82.0
- factory c:openSUSE:Factory/firewalld/35.0 p1:openSUSE:Factory/firewalld/34.0 p2:security:netfilter/firewalld/82.0
- devel c:security:netfilter/firewalld/82.0 p1:security:netfilter/firewalld/81.0
- devel c:security:netfilter/firewalld/81.0 p1:security:netfilter/firewalld/80.0
- devel c:security:netfilter/firewalld/80.0 p1:security:netfilter/firewalld/79.0
- devel c:security:netfilter/firewalld/79.0 p1:security:netfilter/firewalld/78.0
- devel c:security:netfilter/firewalld/78.0 p1:security:netfilter/firewalld/77.0
- factory c:openSUSE:Factory/firewalld/34.0 p1:openSUSE:Factory/firewalld/33.0 p2:security:netfilter/firewalld/77.0
- devel c:security:netfilter/firewalld/77.0 p1:security:netfilter/firewalld/76.0
- devel c:security:netfilter/firewalld/76.0 p1:security:netfilter/firewalld/75.0
- devel c:security:netfilter/firewalld/75.0 p1:security:netfilter/firewalld/74.0
- factory c:openSUSE:Factory/firewalld/33.0 p1:openSUSE:Factory/firewalld/32.0
- factory c:openSUSE:Factory/firewalld/32.0 p1:openSUSE:Factory/firewalld/31.0 p2:security:netfilter/firewalld/74.0
- devel c:security:netfilter/firewalld/74.0 p1:security:netfilter/firewalld/71.0
- factory c:openSUSE:Factory/firewalld/31.0 p1:openSUSE:Factory/firewalld/30.0
- factory c:openSUSE:Factory/firewalld/30.0 p1:openSUSE:Factory/firewalld/29.0 p2:security:netfilter/firewalld/71.0
- devel c:security:netfilter/firewalld/71.0 p1:security:netfilter/firewalld/69.0
- factory c:openSUSE:Factory/firewalld/29.0 p1:openSUSE:Factory/firewalld/28.0 p2:security:netfilter/firewalld/69.0
- devel c:security:netfilter/firewalld/69.0 p1:security:netfilter/firewalld/68.0
- factory c:openSUSE:Factory/firewalld/28.0 p1:openSUSE:Factory/firewalld/27.0 p2:security:netfilter/firewalld/68.0
- devel c:security:netfilter/firewalld/68.0 p1:security:netfilter/firewalld/67.0
- devel c:security:netfilter/firewalld/67.0 p1:security:netfilter/firewalld/65.0
- factory c:openSUSE:Factory/firewalld/27.0 p1:openSUSE:Factory/firewalld/26.0 p2:security:netfilter/firewalld/65.0
- devel c:security:netfilter/firewalld/65.0 p1:security:netfilter/firewalld/63.0
- factory c:openSUSE:Factory/firewalld/26.0 p1:openSUSE:Factory/firewalld/25.0 p2:security:netfilter/firewalld/63.0
- devel c:security:netfilter/firewalld/63.0 p1:security:netfilter/firewalld/61.0
- factory c:openSUSE:Factory/firewalld/25.0 p1:openSUSE:Factory/firewalld/24.0 p2:security:netfilter/firewalld/61.0
- devel c:security:netfilter/firewalld/61.0 p1:security:netfilter/firewalld/60.0
- devel c:security:netfilter/firewalld/60.0 p1:security:netfilter/firewalld/59.0
- devel c:security:netfilter/firewalld/59.0 p1:security:netfilter/firewalld/57.0
- factory c:openSUSE:Factory/firewalld/24.0 p1:openSUSE:Factory/firewalld/23.0 p2:security:netfilter/firewalld/57.0
- devel c:security:netfilter/firewalld/57.0 p1:security:netfilter/firewalld/55.0
- factory c:openSUSE:Factory/firewalld/23.0 p1:openSUSE:Factory/firewalld/22.0 p2:security:netfilter/firewalld/55.0
- devel c:security:netfilter/firewalld/55.0 p1:security:netfilter/firewalld/54.0
- devel c:security:netfilter/firewalld/54.0 p1:security:netfilter/firewalld/53.0
- devel c:security:netfilter/firewalld/53.0 p1:security:netfilter/firewalld/51.0
- factory c:openSUSE:Factory/firewalld/22.0 p1:openSUSE:Factory/firewalld/21.0 p2:security:netfilter/firewalld/51.0
- devel c:security:netfilter/firewalld/51.0 p1:security:netfilter/firewalld/50.0
- devel c:security:netfilter/firewalld/50.0 p1:security:netfilter/firewalld/48.0
- factory c:openSUSE:Factory/firewalld/21.0 p1:openSUSE:Factory/firewalld/20.0 p2:security:netfilter/firewalld/48.0
- devel c:security:netfilter/firewalld/48.0 p1:security:netfilter/firewalld/47.0
- devel c:security:netfilter/firewalld/47.0 p1:security:netfilter/firewalld/45.0
- factory c:openSUSE:Factory/firewalld/20.0 p1:openSUSE:Factory/firewalld/19.0 p2:security:netfilter/firewalld/45.0
- devel c:security:netfilter/firewalld/45.0 p1:security:netfilter/firewalld/43.0
- factory c:openSUSE:Factory/firewalld/19.0 p1:openSUSE:Factory/firewalld/18.0 p2:security:netfilter/firewalld/43.0
- devel c:security:netfilter/firewalld/43.0 p1:security:netfilter/firewalld/41.0
- factory c:openSUSE:Factory/firewalld/18.0 p1:openSUSE:Factory/firewalld/17.0 p2:security:netfilter/firewalld/41.0
- devel c:security:netfilter/firewalld/41.0 p1:security:netfilter/firewalld/39.0
- factory c:openSUSE:Factory/firewalld/17.0 p1:openSUSE:Factory/firewalld/16.0 p2:security:netfilter/firewalld/39.0
- devel c:security:netfilter/firewalld/39.0 p1:security:netfilter/firewalld/38.0
- devel c:security:netfilter/firewalld/38.0 p1:security:netfilter/firewalld/36.0
- factory c:openSUSE:Factory/firewalld/16.0 p1:openSUSE:Factory/firewalld/15.0 p2:security:netfilter/firewalld/36.0
- devel c:security:netfilter/firewalld/36.0 p1:security:netfilter/firewalld/34.0
- factory c:openSUSE:Factory/firewalld/15.0 p1:openSUSE:Factory/firewalld/14.0 p2:security:netfilter/firewalld/34.0
- devel c:security:netfilter/firewalld/34.0 p1:security:netfilter/firewalld/32.0
- factory c:openSUSE:Factory/firewalld/14.0 p1:openSUSE:Factory/firewalld/13.0 p2:security:netfilter/firewalld/32.0
- devel c:security:netfilter/firewalld/32.0 p1:security:netfilter/firewalld/30.0
- factory c:openSUSE:Factory/firewalld/13.0 p1:openSUSE:Factory/firewalld/12.0 p2:security:netfilter/firewalld/30.0
- devel c:security:netfilter/firewalld/30.0 p1:security:netfilter/firewalld/28.0
- factory c:openSUSE:Factory/firewalld/12.0 p1:openSUSE:Factory/firewalld/11.0 p2:security:netfilter/firewalld/28.0
- devel c:security:netfilter/firewalld/28.0 p1:security:netfilter/firewalld/26.0
- factory c:openSUSE:Factory/firewalld/11.0 p1:openSUSE:Factory/firewalld/10.0 p2:security:netfilter/firewalld/26.0
- devel c:security:netfilter/firewalld/26.0 p1:security:netfilter/firewalld/24.0
- factory c:openSUSE:Factory/firewalld/10.0 p1:openSUSE:Factory/firewalld/9.0 p2:security:netfilter/firewalld/24.0
- devel c:security:netfilter/firewalld/24.0 p1:security:netfilter/firewalld/22.0
- factory c:openSUSE:Factory/firewalld/9.0 p1:openSUSE:Factory/firewalld/8.0 p2:security:netfilter/firewalld/22.0
- devel c:security:netfilter/firewalld/22.0 p1:security:netfilter/firewalld/21.0
- devel c:security:netfilter/firewalld/21.0 p1:security:netfilter/firewalld/19.0
- factory c:openSUSE:Factory/firewalld/8.0 p1:openSUSE:Factory/firewalld/7.0 p2:security:netfilter/firewalld/19.0
- devel c:security:netfilter/firewalld/19.0 p1:security:netfilter/firewalld/17.0
- factory c:openSUSE:Factory/firewalld/7.0 p1:openSUSE:Factory/firewalld/6.0 p2:security:netfilter/firewalld/17.0
- devel c:security:netfilter/firewalld/17.0 p1:security:netfilter/firewalld/15.0
- factory c:openSUSE:Factory/firewalld/6.0 p1:openSUSE:Factory/firewalld/5.0 p2:security:netfilter/firewalld/15.0
- devel c:security:netfilter/firewalld/15.0 p1:security:netfilter/firewalld/13.0
- factory c:openSUSE:Factory/firewalld/5.0 p1:openSUSE:Factory/firewalld/4.0 p2:security:netfilter/firewalld/13.0
- devel c:security:netfilter/firewalld/13.0 p1:security:netfilter/firewalld/11.0
- factory c:openSUSE:Factory/firewalld/4.0 p1:openSUSE:Factory/firewalld/3.0 p2:security:netfilter/firewalld/11.0
- devel c:security:netfilter/firewalld/11.0 p1:security:netfilter/firewalld/9.0
- factory c:openSUSE:Factory/firewalld/3.0 p1:openSUSE:Factory/firewalld/2.0 p2:security:netfilter/firewalld/9.0
- devel c:security:netfilter/firewalld/9.0 p1:security:netfilter/firewalld/8.0
- devel c:security:netfilter/firewalld/8.0 p1:security:netfilter/firewalld/6.0
- factory c:openSUSE:Factory/firewalld/2.0 p1:openSUSE:Factory/firewalld/1.0 p2:security:netfilter/firewalld/6.0
- devel c:security:netfilter/firewalld/6.0 p1:security:netfilter/firewalld/5.0
- devel c:security:netfilter/firewalld/5.0 p1:security:netfilter/firewalld/4.0
- devel c:security:netfilter/firewalld/4.0 p1:openSUSE:Factory/firewalld/1.0
- factory c:openSUSE:Factory/firewalld/1.0 p1:security:netfilter/firewalld/2.0
- factory c:security:netfilter/firewalld/2.0 p1:security:netfilter/firewalld/1.0
- factory c:security:netfilter/firewalld/1.0
240 tests/fixtures/firewalld-expected-tree.yaml vendored
@@ -1,240 +0,0 @@
- commit: openSUSE:Factory/firewalld/73.0
  merged:
    - security:netfilter/firewalld/131.0
- commit: openSUSE:Factory/firewalld/72.0
  merged:
    - security:netfilter/firewalld/129.0
    - security:netfilter/firewalld/128.0
- commit: openSUSE:Factory/firewalld/71.0
  merged:
    - security:netfilter/firewalld/127.0
- commit: openSUSE:Factory/firewalld/70.0
- commit: openSUSE:Factory/firewalld/69.0
  merged:
    - security:netfilter/firewalld/126.0
- commit: openSUSE:Factory/firewalld/68.0
  merged:
    - security:netfilter/firewalld/125.0
- commit: openSUSE:Factory/firewalld/67.0
  merged:
    - security:netfilter/firewalld/124.0
- commit: openSUSE:Factory/firewalld/66.0
  merged:
    - security:netfilter/firewalld/123.0
- commit: openSUSE:Factory/firewalld/65.0
  merged:
    - security:netfilter/firewalld/122.0
- commit: openSUSE:Factory/firewalld/64.0
  merged:
    - security:netfilter/firewalld/121.0
- commit: openSUSE:Factory/firewalld/63.0
  merged:
    - security:netfilter/firewalld/120.0
- commit: openSUSE:Factory/firewalld/62.0
  merged:
    - security:netfilter/firewalld/119.0
- commit: openSUSE:Factory/firewalld/61.0
  merged:
    - security:netfilter/firewalld/118.0
- commit: openSUSE:Factory/firewalld/60.0
  merged:
    - security:netfilter/firewalld/117.0
- commit: openSUSE:Factory/firewalld/59.0
  merged:
    - security:netfilter/firewalld/116.0
- commit: openSUSE:Factory/firewalld/58.0
  merged:
    - security:netfilter/firewalld/115.0
- commit: openSUSE:Factory/firewalld/57.0
  merged:
    - security:netfilter/firewalld/114.0
- commit: openSUSE:Factory/firewalld/56.0
  merged:
    - security:netfilter/firewalld/113.0
- commit: openSUSE:Factory/firewalld/55.0
  merged:
    - security:netfilter/firewalld/112.0
    - security:netfilter/firewalld/111.0
    - security:netfilter/firewalld/110.0
    - security:netfilter/firewalld/109.0
- commit: openSUSE:Factory/firewalld/54.0
  merged:
    - security:netfilter/firewalld/108.0
- commit: openSUSE:Factory/firewalld/53.0
  merged:
    - security:netfilter/firewalld/107.0
- commit: openSUSE:Factory/firewalld/52.0
- commit: openSUSE:Factory/firewalld/51.0
- commit: openSUSE:Factory/firewalld/50.0
  merged:
    - security:netfilter/firewalld/106.0
- commit: openSUSE:Factory/firewalld/49.0
  merged:
    - security:netfilter/firewalld/105.0
    - security:netfilter/firewalld/104.0
    - security:netfilter/firewalld/103.0
- commit: openSUSE:Factory/firewalld/48.0
  merged:
    - security:netfilter/firewalld/102.0
- commit: openSUSE:Factory/firewalld/47.0
  merged:
    - security:netfilter/firewalld/101.0
- commit: openSUSE:Factory/firewalld/46.0
  merged:
    - security:netfilter/firewalld/100.0
- commit: openSUSE:Factory/firewalld/45.0
  merged:
    - security:netfilter/firewalld/99.0
- commit: openSUSE:Factory/firewalld/44.0
  merged:
    - security:netfilter/firewalld/98.0
- commit: openSUSE:Factory/firewalld/43.0
  merged:
    - security:netfilter/firewalld/97.0
    - security:netfilter/firewalld/96.0
    - security:netfilter/firewalld/95.0
    - security:netfilter/firewalld/94.0
- commit: openSUSE:Factory/firewalld/42.0
  merged:
    - security:netfilter/firewalld/93.0
- commit: openSUSE:Factory/firewalld/41.0
  merged:
    - security:netfilter/firewalld/92.0
- commit: openSUSE:Factory/firewalld/40.0
  merged:
    - security:netfilter/firewalld/91.0
- commit: openSUSE:Factory/firewalld/39.0
  merged:
    - security:netfilter/firewalld/90.0
- commit: openSUSE:Factory/firewalld/38.0
  merged:
    - security:netfilter/firewalld/89.0
- commit: openSUSE:Factory/firewalld/37.0
  merged:
    - security:netfilter/firewalld/88.0
    - security:netfilter/firewalld/87.0
    - security:netfilter/firewalld/86.0
    - security:netfilter/firewalld/85.0
- commit: openSUSE:Factory/firewalld/36.0
  merged:
    - security:netfilter/firewalld/84.0
    - security:netfilter/firewalld/83.0
- commit: openSUSE:Factory/firewalld/35.0
  merged:
    - security:netfilter/firewalld/82.0
    - security:netfilter/firewalld/81.0
    - security:netfilter/firewalld/80.0
    - security:netfilter/firewalld/79.0
    - security:netfilter/firewalld/78.0
- commit: openSUSE:Factory/firewalld/34.0
  merged:
    - security:netfilter/firewalld/77.0
    - security:netfilter/firewalld/76.0
    - security:netfilter/firewalld/75.0
- commit: openSUSE:Factory/firewalld/33.0
- commit: openSUSE:Factory/firewalld/32.0
  merged:
    - security:netfilter/firewalld/74.0
- commit: openSUSE:Factory/firewalld/31.0
- commit: openSUSE:Factory/firewalld/30.0
  merged:
    - security:netfilter/firewalld/71.0
- commit: openSUSE:Factory/firewalld/29.0
  merged:
    - security:netfilter/firewalld/69.0
- commit: openSUSE:Factory/firewalld/28.0
  merged:
    - security:netfilter/firewalld/68.0
    - security:netfilter/firewalld/67.0
- commit: openSUSE:Factory/firewalld/27.0
  merged:
    - security:netfilter/firewalld/65.0
- commit: openSUSE:Factory/firewalld/26.0
  merged:
    - security:netfilter/firewalld/63.0
- commit: openSUSE:Factory/firewalld/25.0
  merged:
    - security:netfilter/firewalld/61.0
    - security:netfilter/firewalld/60.0
    - security:netfilter/firewalld/59.0
- commit: openSUSE:Factory/firewalld/24.0
  merged:
    - security:netfilter/firewalld/57.0
- commit: openSUSE:Factory/firewalld/23.0
  merged:
    - security:netfilter/firewalld/55.0
    - security:netfilter/firewalld/54.0
    - security:netfilter/firewalld/53.0
- commit: openSUSE:Factory/firewalld/22.0
  merged:
    - security:netfilter/firewalld/51.0
    - security:netfilter/firewalld/50.0
- commit: openSUSE:Factory/firewalld/21.0
  merged:
    - security:netfilter/firewalld/48.0
    - security:netfilter/firewalld/47.0
- commit: openSUSE:Factory/firewalld/20.0
  merged:
    - security:netfilter/firewalld/45.0
- commit: openSUSE:Factory/firewalld/19.0
  merged:
    - security:netfilter/firewalld/43.0
- commit: openSUSE:Factory/firewalld/18.0
  merged:
    - security:netfilter/firewalld/41.0
- commit: openSUSE:Factory/firewalld/17.0
  merged:
    - security:netfilter/firewalld/39.0
    - security:netfilter/firewalld/38.0
- commit: openSUSE:Factory/firewalld/16.0
  merged:
    - security:netfilter/firewalld/36.0
- commit: openSUSE:Factory/firewalld/15.0
  merged:
    - security:netfilter/firewalld/34.0
- commit: openSUSE:Factory/firewalld/14.0
  merged:
    - security:netfilter/firewalld/32.0
- commit: openSUSE:Factory/firewalld/13.0
  merged:
    - security:netfilter/firewalld/30.0
- commit: openSUSE:Factory/firewalld/12.0
  merged:
    - security:netfilter/firewalld/28.0
- commit: openSUSE:Factory/firewalld/11.0
  merged:
    - security:netfilter/firewalld/26.0
- commit: openSUSE:Factory/firewalld/10.0
  merged:
    - security:netfilter/firewalld/24.0
- commit: openSUSE:Factory/firewalld/9.0
  merged:
    - security:netfilter/firewalld/22.0
    - security:netfilter/firewalld/21.0
- commit: openSUSE:Factory/firewalld/8.0
  merged:
    - security:netfilter/firewalld/19.0
- commit: openSUSE:Factory/firewalld/7.0
  merged:
    - security:netfilter/firewalld/17.0
- commit: openSUSE:Factory/firewalld/6.0
  merged:
    - security:netfilter/firewalld/15.0
- commit: openSUSE:Factory/firewalld/5.0
  merged:
    - security:netfilter/firewalld/13.0
- commit: openSUSE:Factory/firewalld/4.0
  merged:
    - security:netfilter/firewalld/11.0
- commit: openSUSE:Factory/firewalld/3.0
  merged:
    - security:netfilter/firewalld/9.0
    - security:netfilter/firewalld/8.0
- commit: openSUSE:Factory/firewalld/2.0
  merged:
    - security:netfilter/firewalld/6.0
    - security:netfilter/firewalld/5.0
    - security:netfilter/firewalld/4.0
- commit: openSUSE:Factory/firewalld/1.0
- commit: security:netfilter/firewalld/2.0
- commit: security:netfilter/firewalld/1.0
@@ -65,15 +65,6 @@ class TestTreeMethods(unittest.TestCase):
     def test_000update_repos_tree(self):
         self.verify_package("000update-repos")
 
-    def test_breeze_tree(self):
-        self.verify_package("breeze")
-
-    def test_firewalld_tree(self):
-        self.verify_package("firewalld")
-
-    def test_FastCGI_tree(self):
-        self.verify_package("FastCGI")
-
 
 if __name__ == "__main__":
     unittest.main()
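The removed tests above exercised breeze, firewalld and FastCGI through self.verify_package(); verify_package() itself is not shown in this diff. As a purely illustrative stand-in, the sketch below cross-checks the two fixture formats for one package, relating the factory revisions in the expected list to the commits in the expected tree. The file paths and the check are assumptions for illustration, not the project's actual test logic.

# Illustrative cross-check between the two fixture formats for one package.
# This is NOT the project's verify_package(); it only relates the fixtures above.
import yaml


def factory_revisions(list_path):
    with open(list_path) as fh:
        return {
            line.split()[2].split(":", 1)[1]      # the c:<...> field
            for line in fh
            if line.lstrip().startswith("- factory ")
        }


def tree_commits(tree_path):
    with open(tree_path) as fh:
        return {entry["commit"] for entry in yaml.safe_load(fh)}


if __name__ == "__main__":
    pkg = "breeze"
    listed = factory_revisions(f"tests/fixtures/{pkg}-expected-list.yaml")
    treed = tree_commits(f"tests/fixtures/{pkg}-expected-tree.yaml")
    print(f"{pkg}: {len(listed - treed)} factory revision(s) missing from the tree fixture")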
@@ -1,19 +0,0 @@
#!/bin/bash
#
cd /space/dmueller/git-importer

source credentials.sh

while true; do
  for i in $PWD/tasks/*; do
    if test -f "$i"; then
      echo "$(date): Importing $(basename $i)"
      if ! python3 ./git-importer.py -c repos/.cache $(basename $i); then
        mkdir -p $PWD/failed-tasks
        mv -f $i $PWD/failed-tasks
      fi
      rm -f $i
    fi
  done
  inotifywait -q -e create $PWD/tasks
done