Merge pull request #1958 from Vogtinator/containercleaner
Add program for cleaning up maintenance_release projects for containers
commit b4083b0861
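Reviewer note: the new tool is a standard ToolBase command-line utility; the GoCD pipeline added below runs it every two hours as "scripts/container_cleaner.py --verbose run openSUSE:Containers:Tumbleweed". When the tool's dryrun attribute is set (ToolBase is expected to wire this to a command-line switch), run() only logs the DELETE URLs instead of issuing them.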
container_cleaner.py (new executable file, 151 lines added)
@@ -0,0 +1,151 @@
#!/usr/bin/python2
# (c) 2019 fvogt@suse.de
# GPLv3-only

from __future__ import print_function

import osc.conf
import osc.core
import logging
import ToolBase
import sys
import re
from lxml import etree as xml


class ContainerCleaner(ToolBase.ToolBase):
    def __init__(self):
        ToolBase.ToolBase.__init__(self)
        self.logger = logging.getLogger(__name__)

    def getDirEntries(self, path):
        url = self.makeurl(path)
        directory = xml.parse(self.retried_GET(url))
        return directory.xpath("entry/@name")

    def getDirBinaries(self, path):
        url = self.makeurl(path)
        directory = xml.parse(self.retried_GET(url))
        return directory.xpath("binary/@filename")

    def findSourcepkgsToDelete(self, project):
        # Get a list of all images
        srccontainers = self.getDirEntries(["source", project])

        # Sort them into buckets for each package:
        # {"opensuse-tumbleweed-image": ["opensuse-tumbleweed-image.20190402134201", ...]}
        buckets = {}
        regex_maintenance_release = re.compile(R"^(.+)\.[0-9]+$")
        for srccontainer in srccontainers:
            # Get the right bucket
            match = regex_maintenance_release.match(srccontainer)
            if match:
                # Maintenance release
                package = match.group(1)
            else:
                # Not renamed
                package = srccontainer

            if package not in buckets:
                buckets[package] = []

            buckets[package] += [srccontainer]

        for package in buckets:
            # Sort each bucket: Newest provider first
            buckets[package].sort(reverse=True)
            logging.debug("Found %d providers of %s", len(buckets[package]), package)

        # Get a hash for sourcecontainer -> arch with binaries
        # {"opensuse-tumbleweed-image.20190309164844": ["aarch64", "armv7l", "armv6l"],
        #  "kubic-pause-image.20190306124139": ["x86_64", "i586"], ... }
        srccontainerarchs = {}

        archs = self.getDirEntries(["build", project, "containers"])
        regex_srccontainer = re.compile(R"^([^:]+)(:[^:]+)?$")
        for arch in archs:
            buildcontainers = self.getDirEntries(["build", project, "containers", arch])
            for buildcontainer in buildcontainers:
                bins = self.getDirBinaries(["build", project, "containers", arch, buildcontainer])
                if len(bins) > 0:
                    match = regex_srccontainer.match(buildcontainer)
                    if not match:
                        raise Exception("Could not map %s to source container" % buildcontainer)

                    srccontainer = match.group(1)
                    if srccontainer not in srccontainers:
                        raise Exception("Mapped %s to wrong source container (%s)" % (buildcontainer, srccontainer))

                    if srccontainer not in srccontainerarchs:
                        srccontainerarchs[srccontainer] = []

                    logging.debug("%s provides binaries for %s", srccontainer, arch)
                    srccontainerarchs[srccontainer] += [arch]

        # Now go through each bucket and find out what doesn't contribute to the newest five
        can_delete = []
        for package in buckets:
            # {"x86_64": 1, "aarch64": 2, ...}
            archs_found = {}
            for arch in archs:
                archs_found[arch] = 0

            for srccontainer in buckets[package]:
                contributes = False
                if srccontainer in srccontainerarchs:
                    for arch in srccontainerarchs[srccontainer]:
                        if archs_found[arch] < 5:
                            archs_found[arch] += 1
                            contributes = True

                if contributes:
                    logging.debug("%s contributes to %s", srccontainer, package)
                else:
                    logging.info("%s does not contribute", srccontainer)
                    if len([count for count in archs_found.values() if count > 0]) == 0:
                        # If there are A, B, C and D, with only C and D providing binaries,
                        # A and B aren't deleted because they have newer sources. This is
                        # to avoid deleting something due to unforeseen circumstances, e.g.
                        # OBS didn't copy the binaries yet.
                        logging.info("No newer provider found either, ignoring")
                    else:
                        can_delete += [srccontainer]

        return can_delete

    def run(self, project):
        packages = self.findSourcepkgsToDelete(project)

        for package in packages:
            url = self.makeurl(["source", project, package])
            if self.dryrun:
                logging.info("DELETE %s", url)
            else:
                osc.core.http_DELETE(url)


class CommandLineInterface(ToolBase.CommandLineInterface):
    def __init__(self, *args, **kwargs):
        ToolBase.CommandLineInterface.__init__(self, args, kwargs)

    def setup_tool(self):
        tool = ContainerCleaner()
        if self.options.debug:
            logging.basicConfig(level=logging.DEBUG)
        elif self.options.verbose:
            logging.basicConfig(level=logging.INFO)

        return tool

    def do_run(self, subcmd, opts, project):
        """${cmd_name}: run the Container cleaner for the specified project

        ${cmd_usage}
        ${cmd_option_list}
        """

        self.tool.run(project)

if __name__ == "__main__":
    cli = CommandLineInterface()
    sys.exit(cli.main())
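Reviewer note: the heart of findSourcepkgsToDelete() above is a retention rule: within each package bucket the source containers are walked newest-first, and a container is kept as long as it still pushes some architecture's count of binary-providing versions towards five; a container that adds nothing, but has at least one newer sibling that does provide binaries, becomes deletable. Below is a minimal standalone sketch of that rule, not part of the commit; the function name, the keep parameter and the sample data are made up for illustration, and the real code additionally initialises the per-arch counters from the project's full architecture list.

    # Illustration only: the retention rule of findSourcepkgsToDelete(),
    # detached from OBS. "versions" maps a source container name to the
    # architectures it provides binaries for; lexicographically larger
    # names are assumed to be newer (timestamp suffix).
    def deletable(versions, keep=5):
        archs_found = {}   # arch -> number of providers already kept
        can_delete = []
        for name in sorted(versions, reverse=True):  # newest first
            contributes = False
            for arch in versions[name]:
                if archs_found.get(arch, 0) < keep:
                    archs_found[arch] = archs_found.get(arch, 0) + 1
                    contributes = True
            # Only deletable if some newer version already provided binaries
            if not contributes and archs_found:
                can_delete.append(name)
        return can_delete

    # With keep=2 only the two newest i586 providers are retained:
    print(deletable({"img.01": ["i586"], "img.02": ["i586"], "img.03": ["i586"]}, keep=2))
    # ['img.01']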
dist/package/openSUSE-release-tools.spec (vendored, 1 line added)
@@ -431,6 +431,7 @@ exit 0
 %{_bindir}/osrt-check_source_in_factory
 %{_bindir}/osrt-check_tags_in_requests
 %{_bindir}/osrt-compare_pkglist
+%{_bindir}/osrt-container_cleaner
 %{_bindir}/osrt-deptool
 %{_bindir}/osrt-fcc_submitter
 %{_bindir}/osrt-findfileconflicts
gocd/container-cleaner.gocd.yaml (new file, 23 lines added)
@@ -0,0 +1,23 @@
format_version: 3
pipelines:
  ContainerCleaner.Tumbleweed:
    group: openSUSE.Checkers
    lock_behavior: unlockWhenFinished
    environment_variables:
      OSC_CONFIG: /home/go/config/oscrc-totest-manager
    materials:
      script:
        git: https://github.com/openSUSE/openSUSE-release-tools.git
        branch: master
        destination: scripts
    timer:
      spec: 0 0 0-23/2 ? * *
      only_on_changes: false
    stages:
    - Run:
        approval: manual
        resources:
        - staging-bot
        tasks:
        - script: |-
            scripts/container_cleaner.py --verbose run openSUSE:Containers:Tumbleweed
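Reviewer note: GoCD timer specs use the Quartz-style cron format (seconds, minutes, hours, day-of-month, month, day-of-week), so "0 0 0-23/2 ? * *" triggers the pipeline at the top of every second hour; combined with only_on_changes: false it fires even when the scripts material has no new commits.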
tests/container_cleaner_tests.py (new file, 129 lines added)
@@ -0,0 +1,129 @@
import unittest

from container_cleaner import ContainerCleaner

class MockedContainerCleaner(ContainerCleaner):
    def __init__(self, container_arch_map):
        self.container_arch_map = container_arch_map

    def getDirEntries(self, path):
        """Mock certain OBS APIs returning directory entries"""
        if path == ["source", "mock:prj"]:
            srccontainers = [a.split(":")[0] for a in self.container_arch_map.keys()]
            return list(set(srccontainers))  # Remove duplicates
        elif path == ["build", "mock:prj", "containers"]:
            all_archs = []
            for archs in self.container_arch_map.values():
                all_archs += archs

            return list(set(all_archs))
        elif path[0:3] == ["build", "mock:prj", "containers"] and len(path) == 4:
            arch = path[3]
            ret = []
            for srccontainer in self.container_arch_map:
                ret += [srccontainer]

            return ret
        else:
            raise RuntimeError("Path %s not expected" % path)

    def getDirBinaries(self, path):
        """Mock certain OBS APIs returning a list of binaries"""
        if path[0:3] == ["build", "mock:prj", "containers"] and len(path) == 5:
            arch = path[3]
            srccontainer = path[4]
            if arch in self.container_arch_map[srccontainer]:
                return ["A binary"]

            return []
        else:
            raise RuntimeError("Path %s not expected" % path)


class TestContainerCleaner(unittest.TestCase):
    def doTest(self, container_arch_map, to_be_deleted_exp):
        cleaner = MockedContainerCleaner(container_arch_map)
        to_be_deleted = cleaner.findSourcepkgsToDelete("mock:prj")
        to_be_deleted.sort()
        self.assertEqual(to_be_deleted, to_be_deleted_exp)

    def test_empty(self):
        """Empty project, do nothing"""
        container_arch_map = {}

        to_be_deleted_exp = []

        return self.doTest(container_arch_map, to_be_deleted_exp)

    def test_nothingToDo(self):
        """Non-empty project, still do nothing"""
        container_arch_map = {"c": ["i586", "x86_64"],
                              "c.01": ["i586"],
                              "c.02": ["x86_64"],
                              "c.04": ["i586", "x86_64"],
                              "c.06": ["i586"],
                              "c.07": ["x86_64"],
                              "c.08": ["i586", "x86_64"],
                              "c.11": [],
                              "d.42": [], "d.43": []}

        to_be_deleted_exp = []

        return self.doTest(container_arch_map, to_be_deleted_exp)

    def test_multiplePackages(self):
        """Multiple packages in one project"""
        container_arch_map = {"c": ["i586", "x86_64"],
                              "c.01": ["i586"],
                              "c.02": ["x86_64"],
                              "c.03": [],
                              "c.04": ["i586", "x86_64"],
                              "c.05": ["i586", "x86_64"],
                              "c.06": ["i586"],
                              "c.07": ["x86_64"],
                              "c.08": ["i586", "x86_64"],
                              "c.09": ["i586", "x86_64"],
                              "c.10": ["i586", "x86_64"],
                              "c.11": [],
                              "d.42": [], "d.43": [],
                              "e.51": ["i586"],
                              "e.52": ["aarch64"],
                              "e.53": ["i586"],
                              "e.54": ["i586"],
                              "e.55": ["i586"],
                              "e.56": ["i586"],
                              "e.57": ["i586"]}

        to_be_deleted_exp = ["c", "c.01", "c.02", "c.03", "c.04",
                             "e.51"]

        return self.doTest(container_arch_map, to_be_deleted_exp)

    def test_multibuild(self):
        """Packages using _multibuild.
        There is no special handling for _multibuild - It's enough if any flavor has binaries."""
        container_arch_map = {"c:docker": ["i586", "x86_64"],
                              "c.01:docker": ["i586"],
                              "c.02:lxc": ["x86_64"],
                              "c.03:docker": [],
                              "c.04": ["i586", "x86_64"],
                              "c.05:docker": ["i586", "x86_64"],
                              "c.06:docker": ["i586"],
                              "c.07:docker": ["x86_64"],
                              "c.08:docker": ["i586", "x86_64"],
                              "c.09:docker": ["i586", "x86_64"],
                              "c.10:docker": ["i586", "x86_64"],
                              "c.11:docker": [],
                              "d.42:lxc": [], "d.43": [],
                              "e.51": ["i586"],
                              "e.52": ["aarch64"],
                              "e.53": ["i586"],
                              "e.54:docker": ["i586"],
                              "e.55:docker": ["i586"],
                              "e.56": ["i586"],
                              "e.57": ["i586"]}

        to_be_deleted_exp = ["c", "c.01", "c.02", "c.03", "c.04",
                             "e.51"]

        return self.doTest(container_arch_map, to_be_deleted_exp)
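Reviewer note: because the OBS calls are mocked out, these tests only need container_cleaner.py and its imports (osc, ToolBase, lxml) to be importable; something along the lines of "python2 -m unittest container_cleaner_tests" with both the repository root and tests/ on the module search path should exercise them, though the exact invocation depends on how the repository's test harness is set up and is only a sketch.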