From 1de77684a13719ba0b97221bc72a26646d0dc170 Mon Sep 17 00:00:00 2001
From: Ludwig Nussel
Date: Wed, 12 Aug 2015 16:39:35 +0200
Subject: [PATCH 01/12] add --dry option

---
 manager_42.py | 10 ++++++++++
 1 file changed, 10 insertions(+)

diff --git a/manager_42.py b/manager_42.py
index b10a0639..6f57c8af 100755
--- a/manager_42.py
+++ b/manager_42.py
@@ -354,6 +354,8 @@ if __name__ == '__main__':
     parser.add_argument('-f', '--from', dest='from_prj', metavar='PROJECT',
                         help='project where to get the updates (default: %s)' % OPENSUSE,
                         default=OPENSUSE)
+    parser.add_argument('-n', '--dry', action='store_true',
+                        help='dry run, no POST, PUT, DELETE')
 
     args = parser.parse_args()
 
@@ -361,4 +363,12 @@ if __name__ == '__main__':
 
     logging.basicConfig(level=logging.DEBUG if args.debug else logging.INFO)
 
+    if args.dry:
+        def dryrun(t, *args, **kwargs):
+            return lambda *args, **kwargs: logging.info("dryrun %s %s %s", t, args, str(kwargs)[:30])
+
+        http_POST = dryrun('POST')
+        http_PUT = dryrun('PUT')
+        http_DELETE = dryrun('DELETE')
+
     sys.exit(main(args))
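A note on the dry-run technique in PATCH 01: rather than threading a flag through every call site, the script rebinds the module-level HTTP helper names, so all later calls resolve to a logging-only stub. A rough standalone sketch of the same idea, assuming the helpers are module globals as in manager_42.py (the names and URL below are illustrative, not part of the script):

    import logging
    logging.basicConfig(level=logging.INFO)

    def http_POST(url, data=None):
        # stand-in for osc.core.http_POST; the real helper performs the request
        raise RuntimeError("network call")

    def dryrun(verb):
        # factory returning a logging-only replacement for one HTTP verb
        def stub(*args, **kwargs):
            logging.info("dryrun %s %s %s", verb, args, str(kwargs)[:30])
        return stub

    if True:  # stands in for args.dry
        http_POST = dryrun('POST')

    http_POST('https://api.example/source/foo', data='<x/>')  # logs, never sends

Rebinding only affects lookups that go through this module's globals; code holding a direct reference to osc.core.http_POST keeps the real function. The *args, **kwargs on dryrun's own signature in the patch are unused; only the returned callable's parameters matter.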
From e1f79a9d6eb83fef84e38d030f7807424bf7577f Mon Sep 17 00:00:00 2001
From: Ludwig Nussel
Date: Wed, 12 Aug 2015 16:40:27 +0200
Subject: [PATCH 02/12] allow operating on specified packages only

---
 manager_42.py | 13 ++++++++-----
 1 file changed, 8 insertions(+), 5 deletions(-)

diff --git a/manager_42.py b/manager_42.py
index 6f57c8af..c3a32f80 100755
--- a/manager_42.py
+++ b/manager_42.py
@@ -159,11 +159,14 @@ class UpdateCrawler(object):
 
         self.remove_packages('openSUSE:42', packages)
 
-    def crawl(self):
+    def crawl(self, packages = []):
         """Main method of the class that run the crawler."""
-        packages = self.get_source_packages(self.from_prj, expand=False)
-        packages = [ p for p in packages if not p.startswith('_') ]
+        if packages:
+            packages = [p for p in packages if p in self.packages[self.from_prj]]
+        else:
+            packages = self.get_source_packages(self.from_prj, expand=False)
+            packages = [ p for p in packages if not p.startswith('_') ]
 
         requests = dict()
         left_packages = []
 
@@ -345,8 +348,7 @@ def main(args):
     uc.freeze_candidates()
 
 if __name__ == '__main__':
-    description = 'Create SR from SLE to the new openSUSE:42 project for '\
-                  'every new update.'
+    description = 'maintain and sort openSUSE:42 packages into subprojects'
     parser = argparse.ArgumentParser(description=description)
     parser.add_argument('-A', '--apiurl', metavar='URL', help='API URL')
     parser.add_argument('-d', '--debug', action='store_true',
@@ -356,6 +358,7 @@ if __name__ == '__main__':
                         default=OPENSUSE)
     parser.add_argument('-n', '--dry', action='store_true',
                         help='dry run, no POST, PUT, DELETE')
+    parser.add_argument("package", nargs='*', help="package to check")
 
     args = parser.parse_args()

From b4634577a78a8efa0e24e70d6c4b5108898e4b9d Mon Sep 17 00:00:00 2001
From: Ludwig Nussel
Date: Wed, 12 Aug 2015 16:42:05 +0200
Subject: [PATCH 03/12] use logging instead of print

---
 manager_42.py | 17 +++++++++--------
 1 file changed, 9 insertions(+), 8 deletions(-)

diff --git a/manager_42.py b/manager_42.py
index c3a32f80..07f35b81 100755
--- a/manager_42.py
+++ b/manager_42.py
@@ -123,7 +123,7 @@ class UpdateCrawler(object):
         dst_meta = dst_meta.format(package)
 
         url = makeurl(self.apiurl, ['source', project, package, '_meta'])
-        print "PUT", url
+        logging.debug("create %s/%s", project, package)
         http_PUT(url, data=dst_meta)
 
     def _link_content(self, sourceprj, sourcepkg, rev):
@@ -139,11 +139,10 @@ class UpdateCrawler(object):
 
     def upload_link(self, project, package, link_string):
         url = makeurl(self.apiurl, ['source', project, package, '_link'])
-        print "PUT", url
         http_PUT(url, data=link_string)
 
     def link_packages(self, packages, sourceprj, sourcepkg, sourcerev, targetprj, targetpkg):
-        print packages, sourceprj, sourcepkg, sourcerev, targetpkg
+        logging.info("update link %s/%s -> %s/%s@%s [%s]", targetprj, targetpkg, sourceprj, sourcepkg, sourcerev, ','.join(packages))
         self.remove_packages('openSUSE:42:SLE12-Picks', packages)
         self.remove_packages('openSUSE:42:Factory-Copies', packages)
         self.remove_packages('openSUSE:42:SLE-Pkgs-With-Overwrites', packages)
@@ -153,6 +152,7 @@ class UpdateCrawler(object):
         self.upload_link(targetprj, targetpkg, link)
 
         for package in [ p for p in packages if p != targetpkg ]:
+            logging.debug("linking %s -> %s", package, targetpkg)
             link = "<link package='{}' cicount='copy' />".format(targetpkg)
             self.create_package_container(targetprj, package)
             self.upload_link(targetprj, package, link)
@@ -174,7 +174,7 @@ class UpdateCrawler(object):
         for package in packages:
             requestid = self.get_latest_request(self.from_prj, package)
             if requestid is None:
-                print package, "is not from request"
+                logging.warn("%s is not from request", package)
                 left_packages.append(package)
                 continue
             if requestid in requests:
@@ -185,10 +185,10 @@ class UpdateCrawler(object):
         for request, packages in requests.items():
             sourceprj, sourcepkg, sourcerev, targetpkg = self.get_request_infos(request)
             if not sourceprj in self.project_mapping:
-                print "source", sourceprj
+                logging.warn("unrecognized source project %s for [%s] in request %s", sourceprj, packages, request)
                 left_packages = left_packages + packages
                 continue
-            print request, packages, sourceprj, sourcepkg, sourcerev, targetpkg
+            logging.debug(" ".join((request, ','.join(packages), sourceprj, sourcepkg, sourcerev, targetpkg)))
             targetprj = self.project_mapping[sourceprj]
             self.link_packages(packages, sourceprj, sourcepkg, sourcerev, targetprj, targetpkg)
 
@@ -220,7 +220,7 @@ class UpdateCrawler(object):
             root = ET.fromstring(self._get_source_package(self.from_prj, package, None))
             linked = root.find('linked')
             if not linked is None and linked.get('package') != package:
-                print "subpackage?"
+                logging.warn("link mismatch: %s <> %s, subpackage?", linked.get('package'), package)
                 continue
             srcmd5 = self.check_factory_sources(package, root.get('verifymd5'))
             if srcmd5:
@@ -247,6 +247,7 @@ class UpdateCrawler(object):
         if link is None:
             return
         rev = link.get('rev')
+        # XXX: magic number?
         if rev and len(rev) > 5:
             return True
         if not link.get('project'):
@@ -368,7 +369,7 @@ if __name__ == '__main__':
 
     if args.dry:
         def dryrun(t, *args, **kwargs):
-            return lambda *args, **kwargs: logging.info("dryrun %s %s %s", t, args, str(kwargs)[:30])
+            return lambda *args, **kwargs: logging.debug("dryrun %s %s %s", t, args, str(kwargs)[:30])
 
         http_POST = dryrun('POST')
         http_PUT = dryrun('PUT')
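The move to logging in PATCH 03 is more than cosmetic: with %-style arguments the message is only interpolated when the record is actually emitted, and verbosity is controlled in one place (the existing logging.basicConfig call). A minimal illustration with made-up values:

    import logging
    logging.basicConfig(level=logging.INFO)

    prj, pkg = 'openSUSE:42', 'vim'
    logging.debug("create %s/%s", prj, pkg)      # suppressed at INFO; args never formatted
    logging.info("deleting %s/%s", prj, pkg)     # emitted
    logging.warn("%s is not from request", pkg)  # warn() is the legacy alias of warning()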
From be4232f24e259173d5d8a2da5ac63434448374b8 Mon Sep 17 00:00:00 2001
From: Ludwig Nussel
Date: Wed, 12 Aug 2015 16:42:32 +0200
Subject: [PATCH 04/12] avoid exception when trying to delete non-existing package

---
 manager_42.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/manager_42.py b/manager_42.py
index 07f35b81..565be291 100755
--- a/manager_42.py
+++ b/manager_42.py
@@ -100,6 +100,8 @@ class UpdateCrawler(object):
 
     def remove_packages(self, project, packages):
         for package in packages:
+            if not package in self.packages[project]:
+                continue
             url = makeurl(self.apiurl, ['source', project, package])
             try:
                 http_DELETE(url)

From 3e02ff989a2f3ffc994e4b174c959e1260a87c5c Mon Sep 17 00:00:00 2001
From: Ludwig Nussel
Date: Wed, 12 Aug 2015 16:44:50 +0200
Subject: [PATCH 05/12] disable strange code

Not sure what this code is good for. It would unlink new submissions
that landed in openSUSE:42 if there are duplicates. Code running later
would then no longer know where the sources came from, as the last
commit would no longer have a request number attached.
---
 manager_42.py | 13 +++++++------
 1 file changed, 7 insertions(+), 6 deletions(-)

diff --git a/manager_42.py b/manager_42.py
index 565be291..07f31d5e 100755
--- a/manager_42.py
+++ b/manager_42.py
@@ -272,12 +272,13 @@ class UpdateCrawler(object):
         for project in ['openSUSE:42', 'openSUSE:42:SLE-Pkgs-With-Overwrites', 'openSUSE:42:Factory-Copies', 'openSUSE:42:SLE12-Picks']:
             for package in self.packages[project]:
                 if package in mypackages:
-                    # TODO: detach only if actually a link to the deleted package
-                    url = makeurl(self.apiurl, ['source', 'openSUSE:42', package], { 'opackage': package, 'oproject': 'openSUSE:42', 'cmd': 'copy', 'expand': '1'} )
-                    try:
-                        http_POST(url)
-                    except urllib2.HTTPError, err:
-                        pass
+                    # XXX: why was this code here?
+#                    # TODO: detach only if actually a link to the deleted package
+#                    url = makeurl(self.apiurl, ['source', 'openSUSE:42', package], { 'opackage': package, 'oproject': 'openSUSE:42', 'cmd': 'copy', 'expand': '1'} )
+#                    try:
+#                        http_POST(url)
+#                    except urllib2.HTTPError, err:
+#                        pass
                     url = makeurl(self.apiurl, ['source', project, package])
                     http_DELETE(url)
                     self.packages[project].remove(package)
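The membership test PATCH 04 adds is a cheap in-memory guard that spares a guaranteed 404 round trip; the try/except around http_DELETE still has to stay for packages that disappear between listing and deletion. A self-contained sketch of that combination (http_DELETE here is a stand-in, not the osc helper, and the API URL is made up):

    import urllib2

    def http_DELETE(url):
        # stand-in that pretends the package is already gone
        raise urllib2.HTTPError(url, 404, 'not found', {}, None)

    def remove_packages(apiurl, project, packages, known):
        for package in packages:
            if package not in known[project]:
                continue                      # the PATCH 04 guard
            url = '%s/source/%s/%s' % (apiurl, project, package)
            try:
                http_DELETE(url)
            except urllib2.HTTPError as e:
                if e.code != 404:             # tolerate deletes that lost a race
                    raise

    remove_packages('https://api.example', 'openSUSE:42',
                    ['vim'], {'openSUSE:42': ['vim']})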
From 364e75e4d8114175734fa5f1ff79e3d7823dac23 Mon Sep 17 00:00:00 2001
From: Ludwig Nussel
Date: Wed, 12 Aug 2015 16:54:24 +0200
Subject: [PATCH 06/12] avoid hardcoding project lists all over the place

---
 manager_42.py | 47 ++++++++++++++++++++++++++++++-----------------
 1 file changed, 30 insertions(+), 17 deletions(-)

diff --git a/manager_42.py b/manager_42.py
index 07f31d5e..ef35486e 100755
--- a/manager_42.py
+++ b/manager_42.py
@@ -34,7 +34,6 @@ import sys
 from osclib.memoize import memoize
 
 OPENSUSE = 'openSUSE:42'
-SLE = 'SUSE:SLE-12:Update'
 
 makeurl = osc.core.makeurl
 http_GET = osc.core.http_GET
@@ -48,12 +47,27 @@ class UpdateCrawler(object):
     def __init__(self, from_prj):
         self.from_prj = from_prj
         self.apiurl = osc.conf.config['apiurl']
         self.debug = osc.conf.config['debug']
+        self.project_preference_order = [
+            'SUSE:SLE-12-SP1:Update',
+            'SUSE:SLE-12-SP1:GA',
+            'SUSE:SLE-12:Update',
+            'SUSE:SLE-12:GA',
+            'openSUSE:Factory',
+        ]
+        self.subprojects = [
+            self.from_prj + ':SLE-Pkgs-With-Overwrites',
+            self.from_prj + ':Factory-Copies',
+            self.from_prj + ':SLE12-Picks',
+        ]
         self.project_mapping = {}
-        for prj in ['SUSE:SLE-12-SP1:Update', 'SUSE:SLE-12-SP1:GA', 'SUSE:SLE-12:Update', 'SUSE:SLE-12:GA']:
-            self.project_mapping[prj] = 'openSUSE:42:SLE12-Picks'
-        self.project_mapping['openSUSE:Factory'] = 'openSUSE:42:Factory-Copies'
+        for prj in self.project_preference_order:
+            if prj.startswith('SUSE:'):
+                self.project_mapping[prj] = self.from_prj + ':SLE12-Picks'
+            else:
+                self.project_mapping[prj] = self.from_prj + ':Factory-Copies'
+
         self.packages = dict()
-        for project in ['openSUSE:42', 'openSUSE:42:SLE-Pkgs-With-Overwrites', 'openSUSE:42:Factory-Copies', 'openSUSE:42:SLE12-Picks']:
+        for project in [self.from_prj] + self.subprojects:
             self.packages[project] = self.get_source_packages(project)
 
     def get_source_packages(self, project, expand=False):
@@ -159,7 +173,7 @@ class UpdateCrawler(object):
             self.create_package_container(targetprj, package)
             self.upload_link(targetprj, package, link)
 
-        self.remove_packages('openSUSE:42', packages)
+        self.remove_packages(self.from_prj, packages)
 
     def crawl(self, packages = []):
         """Main method of the class that run the crawler."""
@@ -268,8 +282,10 @@ class UpdateCrawler(object):
             self.check_link(prj, package)
 
     def check_dups(self):
+        """ walk through projects in order of preference and delete
+        duplicates in overlayed projects"""
         mypackages = dict()
-        for project in ['openSUSE:42', 'openSUSE:42:SLE-Pkgs-With-Overwrites', 'openSUSE:42:Factory-Copies', 'openSUSE:42:SLE12-Picks']:
+        for project in [self.from_prj] + self.subprojects:
             for package in self.packages[project]:
                 if package in mypackages:
                     # XXX: why was this code here?
@@ -295,8 +311,7 @@ class UpdateCrawler(object):
             exists = False
             if package.get('package').startswith('_product'):
                 continue
-            for prj in ['openSUSE:42', 'openSUSE:42:SLE-Pkgs-With-Overwrites',
-                        'openSUSE:42:Factory-Copies', 'openSUSE:42:SLE12-Picks']:
+            for prj in [self.from_prj] + self.subprojects:
                 if package.get('package') in self.packages[prj]:
                     exists = True
             if exists:
@@ -326,8 +341,7 @@ class UpdateCrawler(object):
                 files.remove(subpackage)
 
         for subpackage in files:
-            for prj in ['openSUSE:42', 'openSUSE:42:SLE-Pkgs-With-Overwrites',
-                        'openSUSE:42:Factory-Copies', 'openSUSE:42:SLE12-Picks']:
+            for prj in [self.from_prj] + self.subprojects:
                 if subpackage in self.packages[prj]:
                     self.remove_packages(prj, [ subpackage ])
 
@@ -342,13 +356,12 @@ def main(args):
 
     uc = UpdateCrawler(args.from_prj)
     uc.check_dups()
-    uc.check_multiple_specs('openSUSE:42:Factory-Copies')
-    uc.check_multiple_specs('openSUSE:42:SLE12-Picks')
-    lp = uc.crawl()
+    for prj in uc.subprojects:
+        uc.check_multiple_specs(prj)
+    lp = uc.crawl(args.package)
     uc.try_to_find_left_packages(lp)
-    uc.find_invalid_links('openSUSE:42')
-    uc.find_invalid_links('openSUSE:42:SLE12-Picks')
-    uc.find_invalid_links('openSUSE:42:Factory-Copies')
+    for prj in [uc.from_prj] + uc.subprojects:
+        uc.find_invalid_links(prj)
     uc.freeze_candidates()
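To see what PATCH 06 buys, here is the mapping it derives, reduced to a standalone snippet (from_prj shown with its default value):

    from_prj = 'openSUSE:42'
    project_preference_order = [
        'SUSE:SLE-12-SP1:Update',
        'SUSE:SLE-12-SP1:GA',
        'SUSE:SLE-12:Update',
        'SUSE:SLE-12:GA',
        'openSUSE:Factory',
    ]

    project_mapping = {}
    for prj in project_preference_order:
        if prj.startswith('SUSE:'):
            project_mapping[prj] = from_prj + ':SLE12-Picks'
        else:
            project_mapping[prj] = from_prj + ':Factory-Copies'

    assert project_mapping['SUSE:SLE-12:GA'] == 'openSUSE:42:SLE12-Picks'
    assert project_mapping['openSUSE:Factory'] == 'openSUSE:42:Factory-Copies'

Every SLE flavour lands in the :SLE12-Picks subproject and Factory in :Factory-Copies, so adding a new source project means touching one list instead of half a dozen hardcoded literals.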
From 64116b2bed1d01c3be9d504ff9a73679bbb95151 Mon Sep 17 00:00:00 2001
From: Ludwig Nussel
Date: Wed, 12 Aug 2015 16:47:59 +0200
Subject: [PATCH 07/12] check all underlay projects for sources

---
 manager_42.py | 18 ++++++++++++------
 1 file changed, 12 insertions(+), 6 deletions(-)

diff --git a/manager_42.py b/manager_42.py
index ef35486e..a9011b72 100755
--- a/manager_42.py
+++ b/manager_42.py
@@ -210,10 +210,10 @@ class UpdateCrawler(object):
 
         return left_packages
 
-    def check_factory_sources(self, package, verifymd5):
+    def check_source_in_project(self, project, package, verifymd5):
         try:
             his = http_GET(makeurl(self.apiurl,
-                                   ['source', 'openSUSE:Factory', package, '_history'])).read()
+                                   ['source', project, package, '_history'])).read()
         except urllib2.HTTPError:
             return None
 
@@ -225,12 +225,14 @@ class UpdateCrawler(object):
         for i in range(min(len(revs), 5)): # check last 5 commits
             srcmd5=revs.pop(0)
             root = http_GET(makeurl(self.apiurl,
-                                    ['source', 'openSUSE:Factory', package], { 'rev': srcmd5, 'view': 'info'})).read()
+                                    ['source', project, package], { 'rev': srcmd5, 'view': 'info'})).read()
             root = ET.fromstring(root)
             if root.get('verifymd5') == verifymd5:
                 return srcmd5
         return None
 
+    # check if we can find the srcmd5 in any of our underlay
+    # projects
     def try_to_find_left_packages(self, packages):
         for package in packages:
             root = ET.fromstring(self._get_source_package(self.from_prj, package, None))
@@ -238,9 +240,13 @@ class UpdateCrawler(object):
             linked = root.find('linked')
             if not linked is None and linked.get('package') != package:
                 logging.warn("link mismatch: %s <> %s, subpackage?", linked.get('package'), package)
                 continue
-            srcmd5 = self.check_factory_sources(package, root.get('verifymd5'))
-            if srcmd5:
-                self.link_packages([ package ], 'openSUSE:Factory', package, srcmd5, self.project_mapping['openSUSE:Factory'], package)
+
+            for project in self.project_preference_order:
+                logging.debug("check whether %s came from %s", package, project)
+                srcmd5 = self.check_source_in_project(project, package, root.get('verifymd5'))
+                if srcmd5:
+                    self.link_packages([ package ], project, package, srcmd5, self.project_mapping[project], package)
+                    break
 
     def check_inner_link(self, project, package, link):
         if not link.get('cicount'):
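PATCH 07 generalizes the Factory-only check into a first-match scan over the preference list: for each candidate project, the last few revisions are compared against the package's verifymd5 fingerprint and the scan stops at the first hit. The core of that search, reduced to a pure function (revisions, fetch_info and limit are hypothetical stand-ins for the _history list and the OBS view=info request):

    def find_matching_rev(revisions, verifymd5, fetch_info, limit=5):
        # scan the newest `limit` revisions, newest first, and return the
        # first srcmd5 whose expanded sources match the fingerprint
        for srcmd5 in revisions[:limit]:
            if fetch_info(srcmd5).get('verifymd5') == verifymd5:
                return srcmd5
        return None

Because the outer loop walks self.project_preference_order in order and breaks on the first hit, an SLE match always wins over a Factory match.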
From ce09335b166af398ac13db597d02dae6a47ea92f Mon Sep 17 00:00:00 2001
From: Ludwig Nussel
Date: Wed, 12 Aug 2015 16:53:54 +0200
Subject: [PATCH 08/12] add --skip-sanity-checks parameter

---
 manager_42.py | 15 +++++++++++----
 1 file changed, 11 insertions(+), 4 deletions(-)

diff --git a/manager_42.py b/manager_42.py
index a9011b72..ab5d4bab 100755
--- a/manager_42.py
+++ b/manager_42.py
@@ -41,6 +41,9 @@ http_DELETE = osc.core.http_DELETE
 http_PUT = osc.core.http_PUT
 http_POST = osc.core.http_POST
 
+# TODO:
+# before deleting a package, search for its links and delete them
+# as well. See build-service/src/api/app/models/package.rb -> find_linking_packages()
 class UpdateCrawler(object):
     def __init__(self, from_prj):
         self.from_prj = from_prj
@@ -362,12 +365,14 @@ def main(args):
 
     uc = UpdateCrawler(args.from_prj)
     uc.check_dups()
-    for prj in uc.subprojects:
-        uc.check_multiple_specs(prj)
+    if not args.skip_sanity_checks:
+        for prj in uc.subprojects:
+            uc.check_multiple_specs(prj)
     lp = uc.crawl(args.package)
     uc.try_to_find_left_packages(lp)
-    for prj in [uc.from_prj] + uc.subprojects:
-        uc.find_invalid_links(prj)
+    if not args.skip_sanity_checks:
+        for prj in [uc.from_prj] + uc.subprojects:
+            uc.find_invalid_links(prj)
     uc.freeze_candidates()
 
 if __name__ == '__main__':
@@ -379,6 +384,8 @@ if __name__ == '__main__':
     parser.add_argument('-f', '--from', dest='from_prj', metavar='PROJECT',
                         help='project where to get the updates (default: %s)' % OPENSUSE,
                         default=OPENSUSE)
+    parser.add_argument('--skip-sanity-checks', action='store_true',
+                        help='don\'t do slow check for broken links (only for testing)')
     parser.add_argument('-n', '--dry', action='store_true',
                         help='dry run, no POST, PUT, DELETE')
     parser.add_argument("package", nargs='*', help="package to check")

From bfb031a9d8a23ccbda9a5ee74e15a547369f09aa Mon Sep 17 00:00:00 2001
From: Ludwig Nussel
Date: Wed, 12 Aug 2015 17:14:38 +0200
Subject: [PATCH 09/12] use remove_packages everywhere

---
 manager_42.py | 12 ++++--------
 1 file changed, 4 insertions(+), 8 deletions(-)

diff --git a/manager_42.py b/manager_42.py
index ab5d4bab..2ad714e7 100755
--- a/manager_42.py
+++ b/manager_42.py
@@ -119,6 +119,7 @@ class UpdateCrawler(object):
         for package in packages:
             if not package in self.packages[project]:
                 continue
+            logging.info("deleting %s/%s", project, package)
             url = makeurl(self.apiurl, ['source', project, package])
             try:
                 http_DELETE(url)
@@ -255,9 +256,7 @@ class UpdateCrawler(object):
         if not link.get('cicount'):
             return
         if link.get('package') not in self.packages[project]:
-            url = makeurl(self.apiurl, ['source', project, package])
-            http_DELETE(url)
-            self.packages[project].remove(package)
+            self.remove_packages(project, [package])
 
     def get_link(self, project, package):
         try:
@@ -304,9 +303,7 @@ class UpdateCrawler(object):
 #                    http_POST(url)
 #                    except urllib2.HTTPError, err:
 #                        pass
-                    url = makeurl(self.apiurl, ['source', project, package])
-                    http_DELETE(url)
-                    self.packages[project].remove(package)
+                    self.remove_packages(project, [packages])
                 else:
                     mypackages[package] = project
 
@@ -351,8 +348,7 @@ class UpdateCrawler(object):
 
         for subpackage in files:
             for prj in [self.from_prj] + self.subprojects:
-                if subpackage in self.packages[prj]:
-                    self.remove_packages(prj, [ subpackage ])
+                self.remove_packages(prj, self.packages[prj])
 
         link = "<link package='{}' cicount='copy' />".format(mainpackage)
         self.create_package_container(project, subpackage)
         self.upload_link(project, subpackage, link)

From 982fd031841577fbe918f9f24a37ccf8eb3f8c08 Mon Sep 17 00:00:00 2001
From: Ludwig Nussel
Date: Thu, 13 Aug 2015 08:31:53 +0200
Subject: [PATCH 10/12] must use package instead of packages here

---
 manager_42.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/manager_42.py b/manager_42.py
index 2ad714e7..638cbcb4 100755
--- a/manager_42.py
+++ b/manager_42.py
@@ -303,7 +303,7 @@ class UpdateCrawler(object):
 #                    http_POST(url)
 #                    except urllib2.HTTPError, err:
 #                        pass
-                    self.remove_packages(project, [packages])
+                    self.remove_packages(project, [package])
                 else:
                     mypackages[package] = project
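The one-character bug that PATCH 10 fixes is easy to miss: inside check_dups the loop variable is `package`, so `[packages]` referenced the wrong name, and wrapping a whole list in brackets means the membership guard in remove_packages compares a list against package names and silently deletes nothing. A toy reproduction (hypothetical names):

    known = ['vim', 'emacs']

    def remove_packages(pkgs):
        for p in pkgs:
            if p not in known:
                continue            # silently skips anything unknown
            print "deleting", p

    packages = ['vim', 'emacs']
    remove_packages([packages])     # iterates over [['vim', 'emacs']]: deletes nothing
    remove_packages(['vim'])        # prints "deleting vim"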
From 8ba3963c95d2169495c739b4706c65aa937d8770 Mon Sep 17 00:00:00 2001
From: Ludwig Nussel
Date: Thu, 13 Aug 2015 11:09:09 +0200
Subject: [PATCH 11/12] empty list as default parameter is bad

http://docs.python-guide.org/en/latest/writing/gotchas/#mutable-default-arguments
---
 manager_42.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/manager_42.py b/manager_42.py
index 638cbcb4..e8b9d67c 100755
--- a/manager_42.py
+++ b/manager_42.py
@@ -179,7 +179,7 @@ class UpdateCrawler(object):
 
         self.remove_packages(self.from_prj, packages)
 
-    def crawl(self, packages = []):
+    def crawl(self, packages = None):
         """Main method of the class that run the crawler."""
 
         if packages:
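The gotcha behind PATCH 11, shown in isolation: a default argument is evaluated once at function definition time, so a default list is shared by every call that omits the argument (see the linked python-guide page). A minimal demonstration:

    def crawl(packages=[]):
        packages.append('crawled')
        return packages

    print crawl()   # ['crawled']
    print crawl()   # ['crawled', 'crawled'] -- the same list object is reused

    def crawl_fixed(packages=None):
        if not packages:             # mirrors the `if packages:` test in crawl()
            packages = []
        packages.append('crawled')
        return packages

    print crawl_fixed()  # ['crawled']
    print crawl_fixed()  # ['crawled']

In crawl() itself the bug was latent, since the body only reads the list, but None is the conventional sentinel and costs nothing.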
From b610d3a03cc089b9d08dc7697fcfaf79bea5057a Mon Sep 17 00:00:00 2001
From: Ludwig Nussel
Date: Thu, 13 Aug 2015 11:09:29 +0200
Subject: [PATCH 12/12] improve style

---
 manager_42.py | 42 ++++++++++++++++++++++--------------------
 1 file changed, 22 insertions(+), 20 deletions(-)

diff --git a/manager_42.py b/manager_42.py
index e8b9d67c..6c63c037 100755
--- a/manager_42.py
+++ b/manager_42.py
@@ -58,10 +58,12 @@ class UpdateCrawler(object):
             'openSUSE:Factory',
         ]
         self.subprojects = [
-            self.from_prj + ':SLE-Pkgs-With-Overwrites',
-            self.from_prj + ':Factory-Copies',
-            self.from_prj + ':SLE12-Picks',
+            '%s:SLE-Pkgs-With-Overwrites' % self.from_prj,
+            '%s:Factory-Copies' % self.from_prj,
+            '%s:SLE12-Picks' % self.from_prj,
         ]
+        self.projects = [self.from_prj] + self.subprojects
+
         self.project_mapping = {}
         for prj in self.project_preference_order:
             if prj.startswith('SUSE:'):
@@ -70,9 +72,9 @@ class UpdateCrawler(object):
                 self.project_mapping[prj] = self.from_prj + ':Factory-Copies'
 
         self.packages = dict()
-        for project in [self.from_prj] + self.subprojects:
+        for project in self.projects:
             self.packages[project] = self.get_source_packages(project)
-    
+
     def get_source_packages(self, project, expand=False):
         """Return the list of packages in a project."""
         query = {'expand': 1} if expand else {}
@@ -90,7 +92,7 @@ class UpdateCrawler(object):
             opts['rev'] = revision
         return http_GET(makeurl(self.apiurl,
                                 ['source', project, package], opts)).read()
-    
+
     def get_latest_request(self, project, package):
         history = http_GET(makeurl(self.apiurl,
                                    ['source', project, package, '_history'])).read()
@@ -190,7 +192,7 @@ class UpdateCrawler(object):
 
         requests = dict()
         left_packages = []
-    
+
         for package in packages:
             requestid = self.get_latest_request(self.from_prj, package)
             if requestid is None:
@@ -220,7 +222,7 @@ class UpdateCrawler(object):
                                    ['source', project, package, '_history'])).read()
         except urllib2.HTTPError:
             return None
-    
+
         his = ET.fromstring(his)
         revs = list()
         for rev in his.findall('revision'):
@@ -234,7 +236,7 @@ class UpdateCrawler(object):
             if root.get('verifymd5') == verifymd5:
                 return srcmd5
         return None
-    
+
     # check if we can find the srcmd5 in any of our underlay
     # projects
     def try_to_find_left_packages(self, packages):
@@ -265,7 +267,7 @@ class UpdateCrawler(object):
         except urllib2.HTTPError:
             return None
         return ET.fromstring(link)
-    
+
     def check_link(self, project, package):
         link = self.get_link(project, package)
         if link is None:
             return
@@ -284,7 +286,7 @@ class UpdateCrawler(object):
                                 ['source', link.get('project'), link.get('package')], opts )).read()
         root = ET.fromstring(root)
         self.link_packages([package], link.get('project'), link.get('package'), root.get('srcmd5'), project, package)
-    
+
     def find_invalid_links(self, prj):
         for package in self.packages[prj]:
             self.check_link(prj, package)
@@ -293,7 +295,7 @@ class UpdateCrawler(object):
         """ walk through projects in order of preference and delete
         duplicates in overlayed projects"""
         mypackages = dict()
-        for project in [self.from_prj] + self.subprojects:
+        for project in self.projects:
             for package in self.packages[project]:
                 if package in mypackages:
                     # XXX: why was this code here?
@@ -312,12 +314,12 @@ class UpdateCrawler(object):
         root = ET.fromstring(http_GET(url).read())
         flink = ET.Element('frozenlinks')
         fl = ET.SubElement(flink, 'frozenlink', {'project': 'openSUSE:Factory'})
-    
+
         for package in root.findall('sourceinfo'):
             exists = False
             if package.get('package').startswith('_product'):
                 continue
-            for prj in [self.from_prj] + self.subprojects:
+            for prj in self.projects:
                 if package.get('package') in self.packages[prj]:
                     exists = True
             if exists:
@@ -325,7 +327,7 @@ class UpdateCrawler(object):
             ET.SubElement(fl, 'package', { 'name': package.get('package'),
                                            'srcmd5': package.get('srcmd5'),
                                            'vrev': package.get('vrev') })
-    
+
         url = makeurl(self.apiurl, ['source', 'openSUSE:42:Factory-Candidates-Check', '_project', '_frozenlinks'], {'meta': '1'})
         http_PUT(url, data=ET.tostring(flink))
 
@@ -347,13 +349,13 @@ class UpdateCrawler(object):
                 files.remove(subpackage)
 
         for subpackage in files:
-            for prj in [self.from_prj] + self.subprojects:
+            for prj in self.projects:
                 self.remove_packages(prj, self.packages[prj])
-    
+
         link = "<link package='{}' cicount='copy' />".format(mainpackage)
         self.create_package_container(project, subpackage)
         self.upload_link(project, subpackage, link)
-    
+
 def main(args):
     # Configure OSC
     osc.conf.get_config(override_apiurl=args.apiurl)
@@ -367,10 +369,10 @@ def main(args):
     lp = uc.crawl(args.package)
     uc.try_to_find_left_packages(lp)
     if not args.skip_sanity_checks:
-        for prj in [uc.from_prj] + uc.subprojects:
+        for prj in uc.projects:
             uc.find_invalid_links(prj)
     uc.freeze_candidates()
-    
+
 if __name__ == '__main__':
     description = 'maintain and sort openSUSE:42 packages into subprojects'
     parser = argparse.ArgumentParser(description=description)