commit f52b4b75f1
manager_42.py
@@ -34,7 +34,6 @@ import sys
 from osclib.memoize import memoize

 OPENSUSE = 'openSUSE:42'
-SLE = 'SUSE:SLE-12:Update'

 makeurl = osc.core.makeurl
 http_GET = osc.core.http_GET
@@ -42,20 +41,40 @@ http_DELETE = osc.core.http_DELETE
 http_PUT = osc.core.http_PUT
 http_POST = osc.core.http_POST

 # TODO:
 # before deleting a package, search for it's links and delete them
 # as well. See build-service/src/api/app/models/package.rb -> find_linking_packages()

 class UpdateCrawler(object):
     def __init__(self, from_prj):
         self.from_prj = from_prj
         self.apiurl = osc.conf.config['apiurl']
         self.debug = osc.conf.config['debug']
+        self.project_preference_order = [
+                'SUSE:SLE-12-SP1:Update',
+                'SUSE:SLE-12-SP1:GA',
+                'SUSE:SLE-12:Update',
+                'SUSE:SLE-12:GA',
+                'openSUSE:Factory',
+                ]
+        self.subprojects = [
+                '%s:SLE-Pkgs-With-Overwrites' % self.from_prj,
+                '%s:Factory-Copies' % self.from_prj,
+                '%s:SLE12-Picks' % self.from_prj,
+                ]
+        self.projects = [self.from_prj] + self.subprojects

         self.project_mapping = {}
-        for prj in ['SUSE:SLE-12-SP1:Update', 'SUSE:SLE-12-SP1:GA', 'SUSE:SLE-12:Update', 'SUSE:SLE-12:GA']:
-            self.project_mapping[prj] = 'openSUSE:42:SLE12-Picks'
-        self.project_mapping['openSUSE:Factory'] = 'openSUSE:42:Factory-Copies'
+        for prj in self.project_preference_order:
+            if prj.startswith('SUSE:'):
+                self.project_mapping[prj] = self.from_prj + ':SLE12-Picks'
+            else:
+                self.project_mapping[prj] = self.from_prj + ':Factory-Copies'

         self.packages = dict()
-        for project in ['openSUSE:42', 'openSUSE:42:SLE-Pkgs-With-Overwrites', 'openSUSE:42:Factory-Copies', 'openSUSE:42:SLE12-Picks']:
+        for project in self.projects:
             self.packages[project] = self.get_source_packages(project)

     def get_source_packages(self, project, expand=False):
         """Return the list of packages in a project."""
         query = {'expand': 1} if expand else {}
@@ -73,7 +92,7 @@ class UpdateCrawler(object):
             opts['rev'] = revision
         return http_GET(makeurl(self.apiurl,
                                 ['source', project, package], opts)).read()

     def get_latest_request(self, project, package):
         history = http_GET(makeurl(self.apiurl,
                                    ['source', project, package, '_history'])).read()
@@ -100,6 +119,9 @@ class UpdateCrawler(object):

     def remove_packages(self, project, packages):
         for package in packages:
+            if not package in self.packages[project]:
+                continue
+            logging.info("deleting %s/%s", project, package)
             url = makeurl(self.apiurl, ['source', project, package])
             try:
                 http_DELETE(url)
@@ -123,7 +145,7 @@ class UpdateCrawler(object):
         dst_meta = dst_meta.format(package)

         url = makeurl(self.apiurl, ['source', project, package, '_meta'])
-        print "PUT", url
+        logging.debug("create %s/%s", project, package)
         http_PUT(url, data=dst_meta)

     def _link_content(self, sourceprj, sourcepkg, rev):
@@ -139,11 +161,10 @@ class UpdateCrawler(object):

     def upload_link(self, project, package, link_string):
         url = makeurl(self.apiurl, ['source', project, package, '_link'])
-        print "PUT", url
         http_PUT(url, data=link_string)

     def link_packages(self, packages, sourceprj, sourcepkg, sourcerev, targetprj, targetpkg):
-        print packages, sourceprj, sourcepkg, sourcerev, targetpkg
+        logging.info("update link %s/%s -> %s/%s@%s [%s]", targetprj, targetpkg, sourceprj, sourcepkg, sourcerev, ','.join(packages))
         self.remove_packages('openSUSE:42:SLE12-Picks', packages)
         self.remove_packages('openSUSE:42:Factory-Copies', packages)
         self.remove_packages('openSUSE:42:SLE-Pkgs-With-Overwrites', packages)
@@ -153,25 +174,29 @@ class UpdateCrawler(object):
             self.upload_link(targetprj, targetpkg, link)

         for package in [ p for p in packages if p != targetpkg ]:
+            logging.debug("linking %s -> %s", package, targetpkg)
             link = "<link cicount='copy' package='{}' />".format(targetpkg)
             self.create_package_container(targetprj, package)
             self.upload_link(targetprj, package, link)

-        self.remove_packages('openSUSE:42', packages)
+        self.remove_packages(self.from_prj, packages)

-    def crawl(self):
+    def crawl(self, packages = None):
         """Main method of the class that run the crawler."""
-        packages = self.get_source_packages(self.from_prj, expand=False)
-        packages = [ p for p in packages if not p.startswith('_') ]
+        if packages:
+            packages = [p for p in packages if p in self.packages[self.from_prj]]
+        else:
+            packages = self.get_source_packages(self.from_prj, expand=False)
+            packages = [ p for p in packages if not p.startswith('_') ]
         requests = dict()

         left_packages = []

         for package in packages:
             requestid = self.get_latest_request(self.from_prj, package)
             if requestid is None:
-                print package, "is not from request"
+                logging.warn("%s is not from request", package)
                 left_packages.append(package)
                 continue
             if requestid in requests:
@@ -182,22 +207,22 @@ class UpdateCrawler(object):
         for request, packages in requests.items():
             sourceprj, sourcepkg, sourcerev, targetpkg = self.get_request_infos(request)
             if not sourceprj in self.project_mapping:
-                print "source", sourceprj
+                logging.warn("unrecognized source project %s for [%s] in request %s", sourceprj, packages, request)
                 left_packages = left_packages + packages
                 continue
-            print request, packages, sourceprj, sourcepkg, sourcerev, targetpkg
+            logging.debug(" ".join((request, ','.join(packages), sourceprj, sourcepkg, sourcerev, targetpkg)))
             targetprj = self.project_mapping[sourceprj]
             self.link_packages(packages, sourceprj, sourcepkg, sourcerev, targetprj, targetpkg)

         return left_packages

-    def check_factory_sources(self, package, verifymd5):
+    def check_source_in_project(self, project, package, verifymd5):
         try:
             his = http_GET(makeurl(self.apiurl,
-                                   ['source', 'openSUSE:Factory', package, '_history'])).read()
+                                   ['source', project, package, '_history'])).read()
         except urllib2.HTTPError:
             return None

         his = ET.fromstring(his)
         revs = list()
         for rev in his.findall('revision'):
@@ -206,30 +231,34 @@ class UpdateCrawler(object):
             for i in range(min(len(revs), 5)): # check last 5 commits
                 srcmd5=revs.pop(0)
                 root = http_GET(makeurl(self.apiurl,
-                                        ['source', 'openSUSE:Factory', package], { 'rev': srcmd5, 'view': 'info'})).read()
+                                        ['source', project, package], { 'rev': srcmd5, 'view': 'info'})).read()
                 root = ET.fromstring(root)
                 if root.get('verifymd5') == verifymd5:
                     return srcmd5
         return None

+    # check if we can find the srcmd5 in any of our underlay
+    # projects
     def try_to_find_left_packages(self, packages):
         for package in packages:
             root = ET.fromstring(self._get_source_package(self.from_prj, package, None))
             linked = root.find('linked')
             if not linked is None and linked.get('package') != package:
-                print "subpackage?"
+                logging.warn("link mismatch: %s <> %s, subpackage?", linked.get('package'), package)
                 continue
-            srcmd5 = self.check_factory_sources(package, root.get('verifymd5'))
-            if srcmd5:
-                self.link_packages([ package ], 'openSUSE:Factory', package, srcmd5, self.project_mapping['openSUSE:Factory'], package)
+
+            for project in self.project_preference_order:
+                logging.debug("check whether %s came from %s", package, project)
+                srcmd5 = self.check_source_in_project(project, package, root.get('verifymd5'))
+                if srcmd5:
+                    self.link_packages([ package ], project, package, srcmd5, self.project_mapping[project], package)
+                    break

     def check_inner_link(self, project, package, link):
         if not link.get('cicount'):
             return
         if link.get('package') not in self.packages[project]:
-            url = makeurl(self.apiurl, ['source', project, package])
-            http_DELETE(url)
-            self.packages[project].remove(package)
+            self.remove_packages(project, [package])

     def get_link(self, project, package):
         try:
@@ -238,12 +267,13 @@ class UpdateCrawler(object):
         except urllib2.HTTPError:
             return None
         return ET.fromstring(link)

     def check_link(self, project, package):
         link = self.get_link(project, package)
         if link is None:
             return
         rev = link.get('rev')
+        # XXX: magic number?
         if rev and len(rev) > 5:
             return True
         if not link.get('project'):
@@ -256,25 +286,26 @@ class UpdateCrawler(object):
                                ['source', link.get('project'), link.get('package')], opts )).read()
         root = ET.fromstring(root)
         self.link_packages([package], link.get('project'), link.get('package'), root.get('srcmd5'), project, package)

     def find_invalid_links(self, prj):
         for package in self.packages[prj]:
             self.check_link(prj, package)

     def check_dups(self):
+        """ walk through projects in order of preference and delete
+        duplicates in overlayed projects"""
         mypackages = dict()
-        for project in ['openSUSE:42', 'openSUSE:42:SLE-Pkgs-With-Overwrites', 'openSUSE:42:Factory-Copies', 'openSUSE:42:SLE12-Picks']:
+        for project in self.projects:
             for package in self.packages[project]:
                 if package in mypackages:
-                    # TODO: detach only if actually a link to the deleted package
-                    url = makeurl(self.apiurl, ['source', 'openSUSE:42', package], { 'opackage': package, 'oproject': 'openSUSE:42', 'cmd': 'copy', 'expand': '1'} )
-                    try:
-                        http_POST(url)
-                    except urllib2.HTTPError, err:
-                        pass
-                    url = makeurl(self.apiurl, ['source', project, package])
-                    http_DELETE(url)
-                    self.packages[project].remove(package)
+                    # XXX: why was this code here?
+                    # # TODO: detach only if actually a link to the deleted package
+                    # url = makeurl(self.apiurl, ['source', 'openSUSE:42', package], { 'opackage': package, 'oproject': 'openSUSE:42', 'cmd': 'copy', 'expand': '1'} )
+                    # try:
+                    #     http_POST(url)
+                    # except urllib2.HTTPError, err:
+                    #     pass
+                    self.remove_packages(project, [package])
                 else:
                     mypackages[package] = project

@@ -283,13 +314,12 @@ class UpdateCrawler(object):
         root = ET.fromstring(http_GET(url).read())
         flink = ET.Element('frozenlinks')
         fl = ET.SubElement(flink, 'frozenlink', {'project': 'openSUSE:Factory'})

         for package in root.findall('sourceinfo'):
             exists = False
             if package.get('package').startswith('_product'):
                 continue
-            for prj in ['openSUSE:42', 'openSUSE:42:SLE-Pkgs-With-Overwrites',
-                        'openSUSE:42:Factory-Copies', 'openSUSE:42:SLE12-Picks']:
+            for prj in self.projects:
                 if package.get('package') in self.packages[prj]:
                     exists = True
             if exists:
@@ -297,7 +327,7 @@ class UpdateCrawler(object):
                 ET.SubElement(fl, 'package', { 'name': package.get('package'),
                                                'srcmd5': package.get('srcmd5'),
                                                'vrev': package.get('vrev') })

         url = makeurl(self.apiurl, ['source', 'openSUSE:42:Factory-Candidates-Check', '_project', '_frozenlinks'], {'meta': '1'})
         http_PUT(url, data=ET.tostring(flink))

@@ -319,15 +349,13 @@ class UpdateCrawler(object):
                     files.remove(subpackage)

         for subpackage in files:
-            for prj in ['openSUSE:42', 'openSUSE:42:SLE-Pkgs-With-Overwrites',
-                        'openSUSE:42:Factory-Copies', 'openSUSE:42:SLE12-Picks']:
-                if subpackage in self.packages[prj]:
-                    self.remove_packages(prj, [ subpackage ])
+            for prj in self.projects:
+                self.remove_packages(prj, self.packages[prj])

             link = "<link cicount='copy' package='{}' />".format(mainpackage)
             self.create_package_container(project, subpackage)
             self.upload_link(project, subpackage, link)

 def main(args):
     # Configure OSC
     osc.conf.get_config(override_apiurl=args.apiurl)
@@ -335,18 +363,18 @@ def main(args):

     uc = UpdateCrawler(args.from_prj)
     uc.check_dups()
-    uc.check_multiple_specs('openSUSE:42:Factory-Copies')
-    uc.check_multiple_specs('openSUSE:42:SLE12-Picks')
-    lp = uc.crawl()
+    if not args.skip_sanity_checks:
+        for prj in uc.subprojects:
+            uc.check_multiple_specs(prj)
+    lp = uc.crawl(args.package)
     uc.try_to_find_left_packages(lp)
-    uc.find_invalid_links('openSUSE:42')
-    uc.find_invalid_links('openSUSE:42:SLE12-Picks')
-    uc.find_invalid_links('openSUSE:42:Factory-Copies')
+    if not args.skip_sanity_checks:
+        for prj in uc.projects:
+            uc.find_invalid_links(prj)
     uc.freeze_candidates()

 if __name__ == '__main__':
-    description = 'Create SR from SLE to the new openSUSE:42 project for '\
-                  'every new update.'
+    description = 'maintain sort openSUSE:42 packages into subprojects'
     parser = argparse.ArgumentParser(description=description)
     parser.add_argument('-A', '--apiurl', metavar='URL', help='API URL')
     parser.add_argument('-d', '--debug', action='store_true',
@@ -354,6 +382,11 @@ if __name__ == '__main__':
     parser.add_argument('-f', '--from', dest='from_prj', metavar='PROJECT',
                         help='project where to get the updates (default: %s)' % OPENSUSE,
                         default=OPENSUSE)
+    parser.add_argument('--skip-sanity-checks', action='store_true',
+                        help='don\'t do slow check for broken links (only for testing)')
+    parser.add_argument('-n', '--dry', action='store_true',
+                        help='dry run, no POST, PUT, DELETE')
+    parser.add_argument("package", nargs='*', help="package to check")

     args = parser.parse_args()

@@ -361,4 +394,12 @@ if __name__ == '__main__':
     logging.basicConfig(level=logging.DEBUG if args.debug
                         else logging.INFO)

+    if args.dry:
+        def dryrun(t, *args, **kwargs):
+            return lambda *args, **kwargs: logging.debug("dryrun %s %s %s", t, args, str(kwargs)[:30])
+
+        http_POST = dryrun('POST')
+        http_PUT = dryrun('PUT')
+        http_DELETE = dryrun('DELETE')
+
     sys.exit(main(args))
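
Note on the final hunk: the new --dry mode works by rebinding the module-level http_POST, http_PUT and http_DELETE aliases to logging stubs after argument parsing. Every call site inside UpdateCrawler picks the stubs up without further changes, because the methods resolve those bare names through module globals at call time. A minimal standalone sketch of the same pattern; the URL and payload below are made up for illustration:

    import logging

    logging.basicConfig(level=logging.DEBUG)

    def dryrun(t, *args, **kwargs):
        # Return a stub that logs the HTTP verb and (truncated) arguments
        # instead of performing the request.
        return lambda *args, **kwargs: logging.debug(
            "dryrun %s %s %s", t, args, str(kwargs)[:30])

    # Rebind the module-level helper, as the patch does under `if args.dry:`.
    http_PUT = dryrun('PUT')

    # Any later call through the module-level name now only logs:
    http_PUT('https://api.example.org/source/prj/pkg/_link', data="<link />")

One caveat when reusing this trick: the rebinding must happen in the module that defines the aliases; code that imported the functions into its own namespace beforehand would keep the real implementations.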