diff --git a/ReviewBot.py b/ReviewBot.py
index 9f7e454c..8ed9f613 100644
--- a/ReviewBot.py
+++ b/ReviewBot.py
@@ -367,7 +367,7 @@ class ReviewBot(object):
             node = ET.fromstring(''.join(m)).find('devel')
             if node is not None:
                 return node.get('project'), node.get('package', None)
-        except urllib2.HTTPError, e:
+        except urllib2.HTTPError as e:
             if e.code != 404:
                 raise e
         return None, None
@@ -387,7 +387,7 @@ class ReviewBot(object):
             else:
                 return False
             states = set([review.get('state') for review in root.findall('review') if review.get(by_what) == reviewer])
-        except urllib2.HTTPError, e:
+        except urllib2.HTTPError as e:
             print('ERROR in URL %s [%s]' % (url, e))
         if not states:
             return None
@@ -644,7 +644,7 @@ class CommandLineInterface(cmdln.Cmdln):
         while True:
             try:
                 workfunc()
-            except Exception, e:
+            except Exception as e:
                 self.logger.exception(e)
 
             if interval:
diff --git a/ToolBase.py b/ToolBase.py
index 0e755584..38ac1b39 100644
--- a/ToolBase.py
+++ b/ToolBase.py
@@ -67,7 +67,7 @@ class ToolBase(object):
     def retried_GET(self, url):
         try:
             return http_GET(url)
-        except urllib2.HTTPError, e:
+        except urllib2.HTTPError as e:
             if 500 <= e.code <= 599:
                 print 'Retrying {}'.format(url)
                 time.sleep(1)
@@ -195,7 +195,7 @@ class CommandLineInterface(cmdln.Cmdln):
         while True:
             try:
                 workfunc()
-            except Exception, e:
+            except Exception as e:
                 logger.exception(e)
 
             if interval:
diff --git a/abichecker/abichecker.py b/abichecker/abichecker.py
index c7fe3913..189ef0b9 100755
--- a/abichecker/abichecker.py
+++ b/abichecker/abichecker.py
@@ -237,16 +237,16 @@ class ABIChecker(ReviewBot.ReviewBot):
         try:
             # compute list of common repos to find out what to compare
             myrepos = self.findrepos(src_project, src_srcinfo, dst_project, dst_srcinfo)
-        except NoBuildSuccess, e:
+        except NoBuildSuccess as e:
             self.logger.info(e)
             self.text_summary += "**Error**: %s\n"%e
             self.reports.append(report)
             return False
-        except NotReadyYet, e:
+        except NotReadyYet as e:
             self.logger.info(e)
             self.reports.append(report)
             return None
-        except SourceBroken, e:
+        except SourceBroken as e:
             self.logger.error(e)
             self.text_summary += "**Error**: %s\n"%e
             self.reports.append(report)
@@ -271,21 +271,21 @@ class ABIChecker(ReviewBot.ReviewBot):
                 dst_srcinfo = origin_srcinfo
                 if new_repo_map is not None:
                     myrepos = new_repo_map
-            except MaintenanceError, e:
+            except MaintenanceError as e:
                 self.text_summary += "**Error**: %s\n\n"%e
                 self.logger.error('%s', e)
                 self.reports.append(report)
                 return False
-            except NoBuildSuccess, e:
+            except NoBuildSuccess as e:
                 self.logger.info(e)
                 self.text_summary += "**Error**: %s\n"%e
                 self.reports.append(report)
                 return False
-            except NotReadyYet, e:
+            except NotReadyYet as e:
                 self.logger.info(e)
                 self.reports.append(report)
                 return None
-            except SourceBroken, e:
+            except SourceBroken as e:
                 self.logger.error(e)
                 self.text_summary += "**Error**: %s\n"%e
                 self.reports.append(report)
@@ -304,16 +304,16 @@ class ABIChecker(ReviewBot.ReviewBot):
                 # nothing to fetch, so no libs
                 if dst_libs is None:
                     continue
-            except DistUrlMismatch, e:
+            except DistUrlMismatch as e:
                 self.logger.error("%s/%s %s/%s: %s"%(dst_project, dst_package, mr.dstrepo, mr.arch, e))
                 if ret == True: # need to check again
                     ret = None
                 continue
-            except MissingDebugInfo, e:
+            except MissingDebugInfo as e:
                 missing_debuginfo.append(str(e))
                 ret = False
                 continue
-            except FetchError, e:
+            except FetchError as e:
                 self.logger.error(e)
                 if ret == True: # need to check again
                     ret = None
@@ -325,16 +325,16 @@ class ABIChecker(ReviewBot.ReviewBot):
                 if dst_libs:
                     self.text_summary += "*Warning*: the submission does not contain any libs anymore\n\n"
                 continue
-            except DistUrlMismatch, e:
+            except DistUrlMismatch as e:
                 self.logger.error("%s/%s %s/%s: %s"%(src_project, src_package, mr.srcrepo, mr.arch, e))
                 if ret == True: # need to check again
                     ret = None
                 continue
-            except MissingDebugInfo, e:
+            except MissingDebugInfo as e:
                 missing_debuginfo.append(str(e))
                 ret = False
                 continue
-            except FetchError, e:
+            except FetchError as e:
                 self.logger.error(e)
                 if ret == True: # need to check again
                     ret = None
@@ -530,7 +530,7 @@ class ABIChecker(ReviewBot.ReviewBot):
         self.text_summary = ''
         try:
             ret = ReviewBot.ReviewBot.check_one_request(self, req)
-        except Exception, e:
+        except Exception as e:
             import traceback
             self.logger.error("unhandled exception in ABI checker")
             self.logger.error(traceback.format_exc())
@@ -572,7 +572,7 @@ class ABIChecker(ReviewBot.ReviewBot):
             request = self.session.query(DB.Request).filter(DB.Request.id == reqid).one()
             if request.state == 'done':
                 return True
-        except sqlalchemy.orm.exc.NoResultFound, e:
+        except sqlalchemy.orm.exc.NoResultFound as e:
             pass
         return False
 
@@ -586,7 +586,7 @@ class ABIChecker(ReviewBot.ReviewBot):
             self.session.flush()
             request.state = state
             request.result = result
-        except sqlalchemy.orm.exc.NoResultFound, e:
+        except sqlalchemy.orm.exc.NoResultFound as e:
             request = DB.Request(id = req.reqid,
                     state = state,
                     result = result,
@@ -771,7 +771,7 @@ class ABIChecker(ReviewBot.ReviewBot):
         h = None
         try:
             h = self.ts.hdrFromFdno(fd)
-        except rpm.error, e:
+        except rpm.error as e:
             if str(e) == "public key not available":
                 print str(e)
             if str(e) == "public key not trusted":
@@ -786,7 +786,7 @@ class ABIChecker(ReviewBot.ReviewBot):
             [ 'view=cpioheaders' ])
         try:
             r = osc.core.http_GET(u)
-        except urllib2.HTTPError, e:
+        except urllib2.HTTPError as e:
             raise FetchError('failed to fetch header information: %s'%e)
         tmpfile = NamedTemporaryFile(prefix="cpio-", delete=False)
         for chunk in r:
@@ -831,7 +831,7 @@ class ABIChecker(ReviewBot.ReviewBot):
                 'srcmd5' : rev }
             url = osc.core.makeurl(self.apiurl, ('build', src_project, '_result'), query)
             return ET.parse(osc.core.http_GET(url)).getroot()
-        except urllib2.HTTPError, e:
+        except urllib2.HTTPError as e:
             if e.code != 404:
                 self.logger.error('ERROR in URL %s [%s]' % (url, e))
                 raise
diff --git a/abichecker/abichecker_common.py b/abichecker/abichecker_common.py
index 34f0f644..7a4da5c5 100644
--- a/abichecker/abichecker_common.py
+++ b/abichecker/abichecker_common.py
@@ -37,7 +37,7 @@ class Config(object):
         try:
             entry = self.session.query(DB.Config).filter(DB.Config.key == key).one()
             entry.value = value
-        except sqlalchemy.orm.exc.NoResultFound, e:
+        except sqlalchemy.orm.exc.NoResultFound as e:
             entry = DB.Config(key=key, value=value)
             self.session.add(entry)
         self.session.commit()
@@ -46,7 +46,7 @@ class Config(object):
         try:
             entry = self.session.query(DB.Config).filter(DB.Config.key == key).one()
             return entry.value
-        except sqlalchemy.orm.exc.NoResultFound, e:
+        except sqlalchemy.orm.exc.NoResultFound as e:
             pass
         return default
 
@@ -56,7 +56,7 @@ class Config(object):
             self.session.delete(entry)
             self.session.commit()
             return True
-        except sqlalchemy.orm.exc.NoResultFound, e:
+        except sqlalchemy.orm.exc.NoResultFound as e:
             pass
         return False
 
diff --git a/biarchtool.py b/biarchtool.py
index 5a6398bc..1af13f01 100755
--- a/biarchtool.py
+++ b/biarchtool.py
@@ -147,7 +147,7 @@ class BiArchTool(ToolBase.ToolBase):
             try:
                 x = ET.fromstring(self.cached_GET(self.makeurl(['source', self.project, pkg, '_history'], {'rev':'1'})))
             # catch deleted packages
-            except urllib2.HTTPError, e:
+            except urllib2.HTTPError as e:
                 if e.code == 404:
                     continue
                 raise e
@@ -188,7 +188,7 @@ class BiArchTool(ToolBase.ToolBase):
                 self.http_PUT(pkgmetaurl, data=ET.tostring(pkgmeta))
                 if self.caching:
                     self._invalidate__cached_GET(pkgmetaurl)
-            except urllib2.HTTPError, e:
+            except urllib2.HTTPError as e:
                 logger.error('failed to update %s: %s', pkg, e)
 
     def add_explicit_disable(self, wipebinaries=False):
@@ -225,7 +225,7 @@ class BiArchTool(ToolBase.ToolBase):
                     'cmd' : 'wipe',
                     'arch': self.arch,
                     'package' : pkg }))
-            except urllib2.HTTPError, e:
+            except urllib2.HTTPError as e:
                 logger.error('failed to update %s: %s', pkg, e)
 
 
@@ -236,7 +236,7 @@ class BiArchTool(ToolBase.ToolBase):
             pkgmetaurl = self.makeurl(['source', self.project, pkg, '_meta'])
             try:
                 pkgmeta = ET.fromstring(self.cached_GET(pkgmetaurl))
-            except urllib2.HTTPError, e:
+            except urllib2.HTTPError as e:
                 # catch deleted packages
                 if e.code == 404:
                     continue
@@ -303,7 +303,7 @@ class BiArchTool(ToolBase.ToolBase):
                     'cmd' : 'wipe',
                     'arch': self.arch,
                     'package' : pkg }))
-            except urllib2.HTTPError, e:
+            except urllib2.HTTPError as e:
                 logger.error('failed to update %s: %s', pkg, e)
 
 class CommandLineInterface(ToolBase.CommandLineInterface):
diff --git a/check_source.py b/check_source.py
index dbdf7e54..6333837d 100755
--- a/check_source.py
+++ b/check_source.py
@@ -139,7 +139,7 @@ class CheckSource(ReviewBot.ReviewBot):
     def staging_group(self, project):
         try:
             return self.staging_api(project).cstaging_group
-        except urllib2.HTTPError, e:
+        except urllib2.HTTPError as e:
             if e.code != 404:
                 raise e
 
@@ -176,7 +176,7 @@ class CheckSource(ReviewBot.ReviewBot):
 
         try:
             xml = ET.parse(osc.core.http_GET(url)).getroot()
-        except urllib2.HTTPError, e:
+        except urllib2.HTTPError as e:
             self.logger.error('ERROR in URL %s [%s]' % (url, e))
             return ret
 
diff --git a/check_source_in_factory.py b/check_source_in_factory.py
index 9b7dc7f7..db5373f6 100755
--- a/check_source_in_factory.py
+++ b/check_source_in_factory.py
@@ -123,7 +123,7 @@ class FactorySourceChecker(ReviewBot.ReviewBot):
         u = osc.core.makeurl(self.apiurl, [ 'source', project, package, '_history' ], { 'limit': self.history_limit })
         try:
             r = osc.core.http_GET(u)
-        except urllib2.HTTPError, e:
+        except urllib2.HTTPError as e:
             self.logger.debug("package has no history!?")
             return None
 
diff --git a/devel-project.py b/devel-project.py
index dcd99a9b..6702f088 100755
--- a/devel-project.py
+++ b/devel-project.py
@@ -199,7 +199,7 @@ def remind_comment(apiurl, repeat_age, request_id, project, package=None):
         # Repeat notification so remove old comment.
         try:
             comment_api.delete(comment['id'])
-        except HTTPError, e:
+        except HTTPError as e:
             if e.code == 403:
                 # Gracefully skip when previous reminder was by another user.
                 print(' unable to remove previous reminder')
diff --git a/fcc_submitter.py b/fcc_submitter.py
index d70e1345..c07a9397 100755
--- a/fcc_submitter.py
+++ b/fcc_submitter.py
@@ -128,7 +128,7 @@ class FccFreezer(object):
         l = ET.tostring(flink)
         try:
             http_PUT(url, data=l)
-        except urllib2.HTTPError, e:
+        except urllib2.HTTPError as e:
             raise e
 
 class FccSubmitter(object):
@@ -225,7 +225,7 @@ class FccSubmitter(object):
     def check_multiple_specfiles(self, project, package):
         try:
             url = makeurl(self.apiurl, ['source', project, package], { 'expand': '1' } )
-        except urllib2.HTTPError, e:
+        except urllib2.HTTPError as e:
             if e.code == 404:
                 return None
             raise e
diff --git a/issue-diff.py b/issue-diff.py
index ecfd4c37..b7126d16 100755
--- a/issue-diff.py
+++ b/issue-diff.py
@@ -79,7 +79,7 @@ def bug_owner(apiurl, package, entity='person'):
 def bug_meta_get(bugzilla_api, bug_id):
     try:
         bug = bugzilla_api.getbug(bug_id)
-    except Fault, e:
+    except Fault as e:
         print('bug_meta_get(): ' + str(e))
         return None
     return bug.component
@@ -382,7 +382,7 @@ def main(args):
         try:
             bug_id = bug_create(bugzilla_api, meta, owner, cc, summary, message)
             break
-        except Fault, e:
+        except Fault as e:
             if 'There is no component named' in e.faultString:
                 print('Invalid component {}, fallback to default'.format(meta[1]))
                 meta = (meta[0], bugzilla_defaults[1], meta[2])
diff --git a/leaper.py b/leaper.py
index 8f7e1895..dc84a44d 100755
--- a/leaper.py
+++ b/leaper.py
@@ -93,7 +93,7 @@ class Leaper(ReviewBot.ReviewBot):
             root = ET.parse(osc.core.http_GET(osc.core.makeurl(self.apiurl,['source', project],
                 query=query))).getroot()
             packages = [i.get('name') for i in root.findall('entry')]
-        except urllib2.HTTPError, e:
+        except urllib2.HTTPError as e:
             # in case the project doesn't exist yet (like sle update)
             if e.code != 404:
                 raise e
diff --git a/manager_42.py b/manager_42.py
index 6d971228..5ccc7124 100755
--- a/manager_42.py
+++ b/manager_42.py
@@ -87,7 +87,7 @@ class Manager42(object):
         self.lookup = {}
         try:
             self.lookup = yaml.safe_load(self._load_lookup_file(project))
-        except urllib2.HTTPError, e:
+        except urllib2.HTTPError as e:
             if e.code != 404:
                 raise
 
@@ -119,7 +119,7 @@ class Manager42(object):
     def retried_GET(self, url):
         try:
             return http_GET(url)
-        except urllib2.HTTPError, e:
+        except urllib2.HTTPError as e:
             if 500 <= e.code <= 599:
                 logger.warn('Retrying {}'.format(url))
                 time.sleep(1)
@@ -136,7 +136,7 @@ class Manager42(object):
                 query=query)))
             packages = [i.get('name') for i in root.findall('entry')]
 
-        except urllib2.HTTPError, e:
+        except urllib2.HTTPError as e:
             if e.code == 404:
                 logger.error("{}: {}".format(project, e))
                 packages = []
@@ -161,7 +161,7 @@ class Manager42(object):
         for package in sorted(packages):
             try:
                 self.check_one_package(package)
-            except urllib2.HTTPError, e:
+            except urllib2.HTTPError as e:
                 logger.error("Failed to check {}: {}".format(package, e))
                 pass
 
@@ -179,7 +179,7 @@ class Manager42(object):
                 query['deleted'] = 1
             return self.cached_GET(makeurl(self.apiurl,
                 ['source', project, package, '_history'], query))
-        except urllib2.HTTPError, e:
+        except urllib2.HTTPError as e:
             if e.code == 404:
                 return None
             raise
diff --git a/osclib/accept_command.py b/osclib/accept_command.py
index 52604704..0048e084 100644
--- a/osclib/accept_command.py
+++ b/osclib/accept_command.py
@@ -177,7 +177,7 @@ class AcceptCommand(object):
         url = self.api.makeurl(['build', project], query)
         try:
             http_POST(url)
-        except urllib2.HTTPError, err:
+        except urllib2.HTTPError as err:
             # failed to wipe isos but we can just continue
             pass
 
@@ -229,7 +229,7 @@ class AcceptCommand(object):
             print "Deleting package %s from project %s" % (spec[:-5], project)
             try:
                 http_DELETE(url)
-            except urllib2.HTTPError, err:
+            except urllib2.HTTPError as err:
                 if err.code == 404:
                     # the package link was not yet created, which was likely a mistake from earlier
                     pass
diff --git a/osclib/adi_command.py b/osclib/adi_command.py
index c6f54137..ca59b615 100644
--- a/osclib/adi_command.py
+++ b/osclib/adi_command.py
@@ -54,8 +54,8 @@ class AdiCommand:
                 self.api.accept_status_comment(project, packages)
                 try:
                     delete_project(self.api.apiurl, project, force=True)
-                except urllib2.HTTPError, e:
-                    print e
+                except urllib2.HTTPError as e:
+                    print(e)
                     pass
             else:
                 print query_project, Fore.GREEN + 'ready:', ', '.join(['{}[{}]'.format(
diff --git a/osclib/cycle.py b/osclib/cycle.py
index 2d2a6f92..5c27b59d 100644
--- a/osclib/cycle.py
+++ b/osclib/cycle.py
@@ -157,7 +157,7 @@ class CycleDetector(object):
            # print('Generating _builddepinfo for (%s, %s, %s)' % (project, repository, arch))
             url = makeurl(self.api.apiurl, ['build/%s/%s/%s/_builddepinfo' % (project, repository, arch)])
             root = http_GET(url).read()
-        except urllib2.HTTPError, e:
+        except urllib2.HTTPError as e:
             print('ERROR in URL %s [%s]' % (url, e))
         return root
 
diff --git a/osclib/prio_command.py b/osclib/prio_command.py
index f5e6b9c5..0532cb9b 100644
--- a/osclib/prio_command.py
+++ b/osclib/prio_command.py
@@ -44,7 +44,7 @@ class PrioCommand(object):
             try:
                 osc.core.http_POST(url, data=message)
                 print reqid, r['by'], priority
-            except urllib2.HTTPError, e:
+            except urllib2.HTTPError as e:
                 print e
 
 
diff --git a/osclib/repair_command.py b/osclib/repair_command.py
index f99290b6..06775a62 100644
--- a/osclib/repair_command.py
+++ b/osclib/repair_command.py
@@ -37,7 +37,7 @@ class RepairCommand(object):
         staging_project = reviews[0]
         try:
             data = self.api.get_prj_pseudometa(staging_project)
-        except urllib2.HTTPError, e:
+        except urllib2.HTTPError as e:
             if e.code == 404:
                 data = None
 
diff --git a/osclib/stagingapi.py b/osclib/stagingapi.py
index 76979f2f..e7d7255f 100644
--- a/osclib/stagingapi.py
+++ b/osclib/stagingapi.py
@@ -152,7 +152,7 @@ class StagingAPI(object):
                 if data is not None:
                     return func(url, data=data)
                 return func(url)
-            except urllib2.HTTPError, e:
+            except urllib2.HTTPError as e:
                 if 500 <= e.code <= 599:
                     print 'Error {}, retrying {} in {}s'.format(e.code, url, retry_sleep_seconds)
                     time.sleep(retry_sleep_seconds)
@@ -290,7 +290,7 @@ class StagingAPI(object):
             content = http_GET(url)
             for entry in ET.parse(content).getroot().findall('entry'):
                 filelist.append(entry.attrib['name'])
-        except urllib2.HTTPError, err:
+        except urllib2.HTTPError as err:
             if err.code == 404:
                 # The package we were supposed to query does not exist
                 # we can pass this up and return the empty filelist
@@ -394,7 +394,7 @@ class StagingAPI(object):
         try:
             url = self.makeurl(['source', prj, '_project', '_frozenlinks'], {'meta': '1'})
             root = ET.parse(http_GET(url)).getroot()
-        except urllib2.HTTPError, e:
+        except urllib2.HTTPError as e:
             if e.code == 404:
                 return None
         packages = root.findall('./frozenlink/package')
@@ -1468,7 +1468,7 @@ class StagingAPI(object):
         url = self.makeurl(['build', project, repository, arch, '_repository', "%s?view=fileinfo" % rpm])
         try:
             return ET.parse(http_GET(url)).getroot().find('version').text
-        except urllib2.HTTPError, e:
+        except urllib2.HTTPError as e:
             if e.code == 404:
                 return None
             raise
@@ -1764,7 +1764,7 @@ class StagingAPI(object):
             node = ET.fromstring(''.join(m)).find('devel')
             if node is not None:
                 return node.get('project')
-        except urllib2.HTTPError, e:
+        except urllib2.HTTPError as e:
             if e.code == 404:
                 pass
         return None
diff --git a/suppkg_rebuild.py b/suppkg_rebuild.py
index d385ced9..8533c145 100755
--- a/suppkg_rebuild.py
+++ b/suppkg_rebuild.py
@@ -100,7 +100,7 @@ class StagingHelper(object):
             url = makeurl(self.apiurl, ['source', project, pkg], query=query)
             try:
                 root = ET.parse(http_GET(url)).getroot()
-            except urllib2.HTTPError, e:
+            except urllib2.HTTPError as e:
                 if e.code == 404:
                     continue
                 raise
diff --git a/totest-manager.py b/totest-manager.py
index 925fcaa1..224eeef4 100755
--- a/totest-manager.py
+++ b/totest-manager.py
@@ -457,7 +457,7 @@ class ToTestBase(object):
     def totest(self):
         try:
             current_snapshot = self.get_current_snapshot()
-        except NotFoundException, e:
+        except NotFoundException as e:
             # nothing in :ToTest (yet)
             logger.warn(e)
             current_snapshot = None
@@ -769,7 +769,7 @@ class CommandlineInterface(cmdln.Cmdln):
             try:
                 totest = self._setup_totest(project)
                 totest.totest()
-            except Exception, e:
+            except Exception as e:
                 logger.error(e)
 
             if opts.interval:
diff --git a/update_crawler.py b/update_crawler.py
index ed69adff..9e6dbcea 100755
--- a/update_crawler.py
+++ b/update_crawler.py
@@ -99,7 +99,7 @@ class UpdateCrawler(object):
     def retried_GET(self, url):
         try:
             return http_GET(url)
-        except urllib2.HTTPError, e:
+        except urllib2.HTTPError as e:
             if 500 <= e.code <= 599:
                 print 'Retrying {}'.format(url)
                 time.sleep(1)
@@ -211,7 +211,7 @@ class UpdateCrawler(object):
             )))
             if root.get('project') is None and root.get('cicount'):
                 return True
-        except urllib2.HTTPError, err:
+        except urllib2.HTTPError as err:
             # if there is no link, it can't be a link
             if err.code == 404:
                 return False
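
Every hunk above makes the same mechanical change: the Python 2-only "except SomeError, e:" form is replaced with "except SomeError as e:", which Python 2.6+ and Python 3 both accept (the comma form is a syntax error under Python 3). A minimal sketch of the retry-on-server-error pattern that recurs in retried_GET (ToolBase.py, manager_42.py, update_crawler.py), written with the new spelling; the retried_get name, the max_tries parameter, and the bounded-retry behaviour are illustrative assumptions, not code from this change set:

    # Illustrative sketch only, not part of the diff.
    import time
    import urllib2

    def retried_get(url, http_get=urllib2.urlopen, max_tries=3):
        """Fetch url, retrying on 5xx responses; names and limits are hypothetical."""
        for attempt in range(max_tries):
            try:
                return http_get(url)
            except urllib2.HTTPError as e:  # 'as' spelling works on Python 2.6+ and 3
                if 500 <= e.code <= 599 and attempt + 1 < max_tries:
                    print('Retrying {}'.format(url))
                    time.sleep(1)
                    continue
                raise

Unlike the recursive retried_GET helpers in the repository, the sketch bounds the number of attempts; the exception-handling syntax is the only aspect the change set itself touches.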