diff --git a/abichecker/abichecker.py b/abichecker/abichecker.py
index 878a703e..6c2ebde8 100755
--- a/abichecker/abichecker.py
+++ b/abichecker/abichecker.py
@@ -481,7 +481,7 @@ class ABIChecker(ReviewBot.ReviewBot):
                     # sometimes a previously released maintenance
                     # update didn't cover all architectures. We can
                     # only ignore that then.
-                    self.logger.warn("couldn't find repo %s/%s in %s/%s"%(mr.dstrepo, mr.arch, originproject, originpackage))
+                    self.logger.warning("couldn't find repo %s/%s in %s/%s"%(mr.dstrepo, mr.arch, originproject, originpackage))
                     continue
                 matchrepos.add(MR(mr.srcrepo, mapped[(mr.dstrepo, mr.arch)].srcrepo, mr.arch))
 
@@ -879,16 +879,16 @@ class ABIChecker(ReviewBot.ReviewBot):
 
         for mr in matchrepos:
             if not (mr.srcrepo, mr.arch) in rmap:
-                self.logger.warn("%s/%s had no build success"%(mr.srcrepo, mr.arch))
+                self.logger.warning("%s/%s had no build success"%(mr.srcrepo, mr.arch))
                 raise NotReadyYet(src_project, src_srcinfo.package, "no result")
             if rmap[(mr.srcrepo, mr.arch)]['dirty']:
-                self.logger.warn("%s/%s dirty"%(mr.srcrepo, mr.arch))
+                self.logger.warning("%s/%s dirty"%(mr.srcrepo, mr.arch))
                 raise NotReadyYet(src_project, src_srcinfo.package, "dirty")
             code = rmap[(mr.srcrepo, mr.arch)]['code']
             if code == 'broken':
                 raise SourceBroken(src_project, src_srcinfo.package)
             if code != 'succeeded' and code != 'locked' and code != 'excluded':
-                self.logger.warn("%s/%s not succeeded (%s)"%(mr.srcrepo, mr.arch, code))
+                self.logger.warning("%s/%s not succeeded (%s)"%(mr.srcrepo, mr.arch, code))
                 raise NotReadyYet(src_project, src_srcinfo.package, code)
 
     def findrepos(self, src_project, src_srcinfo, dst_project, dst_srcinfo):
diff --git a/biarchtool.py b/biarchtool.py
index 30c0f8d6..31df6824 100755
--- a/biarchtool.py
+++ b/biarchtool.py
@@ -76,15 +76,15 @@ class BiArchTool(ToolBase.ToolBase):
         if 'baselibs.conf' in files:
             logger.debug('%s has baselibs', package)
             if is_multibuild:
-                logger.warn('%s is multibuild and has baselibs. canot handle that!', package)
+                logger.warning('%s is multibuild and has baselibs. canot handle that!', package)
             else:
                 ret = True
         elif '_link' in files:
             files = self.get_filelist(self.project, srcpkgname, expand = True)
             if 'baselibs.conf' in files:
-                logger.warn('%s is linked to a baselibs package', package)
+                logger.warning('%s is linked to a baselibs package', package)
         elif is_multibuild:
-            logger.warn('%s is multibuild', package)
+            logger.warning('%s is multibuild', package)
 
         self._has_baselibs[package] = ret
         return ret
@@ -138,7 +138,7 @@ class BiArchTool(ToolBase.ToolBase):
         for depnode in pnode.findall('pkgdep'):
             depname = depnode.text
             if depname == name:
-                logger.warn('%s requires itself for build', name)
+                logger.warning('%s requires itself for build', name)
                 continue
             self.rdeps.setdefault(name, set()).add(depname)
 
diff --git a/build-fail-reminder.py b/build-fail-reminder.py
index a5ab64f2..f46b4b77 100755
--- a/build-fail-reminder.py
+++ b/build-fail-reminder.py
@@ -185,9 +185,9 @@ def main(args):
                 pass
             elif Reminded[package].remindCount == 3:
-                logger.warn( "Package '%s' has been failing for three weeks - let's create a bug report" % package)
+                logger.warning( "Package '%s' has been failing for three weeks - let's create a bug report" % package)
             else:
-                logger.warn( "Package '%s' is no longer maintained - send a mail to factory maintainers..." % package)
+                logger.warning( "Package '%s' is no longer maintained - send a mail to factory maintainers..." % package)
 
 
 if __name__ == '__main__':
     parser = argparse.ArgumentParser(description='boilerplate python commmand line program')
diff --git a/check_source.py b/check_source.py
index aaa33859..78443291 100755
--- a/check_source.py
+++ b/check_source.py
@@ -117,7 +117,7 @@ class CheckSource(ReviewBot.ReviewBot):
         # Checkout and see if renaming package screws up version parsing.
         dir = os.path.expanduser('~/co/%s' % self.request.reqid)
         if os.path.exists(dir):
-            self.logger.warn('directory %s already exists' % dir)
+            self.logger.warning('directory %s already exists' % dir)
             shutil.rmtree(dir)
         os.makedirs(dir)
         os.chdir(dir)
diff --git a/leaper.py b/leaper.py
index 353f01b2..057e3635 100755
--- a/leaper.py
+++ b/leaper.py
@@ -179,7 +179,7 @@ class Leaper(ReviewBot.ReviewBot):
         if src_srcinfo is None:
             # source package does not exist?
             # handle here to avoid crashing on the next line
-            self.logger.warn("Could not get source info for %s/%s@%s" % (src_project, src_package, src_rev))
+            self.logger.warning("Could not get source info for %s/%s@%s" % (src_project, src_package, src_rev))
             return False
 
         if self.ibs and target_project.startswith('SUSE:SLE'):
@@ -545,7 +545,7 @@ class Leaper(ReviewBot.ReviewBot):
         elif self.source_in_factory:
             self.logger.info("perfect. the submitted sources are in or accepted for Factory")
         elif self.source_in_factory == False:
-            self.logger.warn("the submitted sources are NOT in Factory")
+            self.logger.warning("the submitted sources are NOT in Factory")
 
         if request_ok == False:
             self.logger.info("NOTE: if you think the automated review was wrong here, please talk to the release team before reopening the request")
diff --git a/manager_42.py b/manager_42.py
index be684f14..42b22c93 100755
--- a/manager_42.py
+++ b/manager_42.py
@@ -126,7 +126,7 @@ class Manager42(object):
             return http_GET(url)
         except HTTPError as e:
             if 500 <= e.code <= 599:
-                logger.warn('Retrying {}'.format(url))
+                logger.warning('Retrying {}'.format(url))
                 time.sleep(1)
                 return self.retried_GET(url)
             raise e
@@ -292,7 +292,7 @@ class Manager42(object):
         if not linked is None and linked.get('package') != package:
             lstring = 'subpackage of {}'.format(linked.get('package'))
             if lstring != lproject:
-                logger.warn("{} links to {} (was {})".format(package, linked.get('package'), lproject))
+                logger.warning("{} links to {} (was {})".format(package, linked.get('package'), lproject))
                 self.lookup[package] = lstring
                 self.lookup_changes += 1
         else:
@@ -306,7 +306,7 @@ class Manager42(object):
         develpkg = None
         if devel is None:
             (dummy, develprj, develpkg) = lproject.split(';')
-            logger.warn('{} lacks devel project setting {}/{}'.format(package, develprj, develpkg))
+            logger.warning('{} lacks devel project setting {}/{}'.format(package, develprj, develpkg))
         else:
             develprj = devel.get('project')
             develpkg = devel.get('package')
diff --git a/pkglistgen/group.py b/pkglistgen/group.py
index 82c30921..05770a20 100644
--- a/pkglistgen/group.py
+++ b/pkglistgen/group.py
@@ -162,7 +162,7 @@ class Group(object):
         for s in self.silents:
             sel = pool.select(str(s), solv.Selection.SELECTION_NAME | solv.Selection.SELECTION_FLAT)
             if sel.isempty():
-                self.logger.warn('{}.{}: silent package {} not found'.format(self.name, arch, s))
+                self.logger.warning('{}.{}: silent package {} not found'.format(self.name, arch, s))
             else:
                 jobs += sel.jobs(solv.Job.SOLVER_INSTALL)
 
diff --git a/pkglistgen/tool.py b/pkglistgen/tool.py
index 6f899a39..5f1c2781 100644
--- a/pkglistgen/tool.py
+++ b/pkglistgen/tool.py
@@ -188,10 +188,10 @@ class PkgListGen(ToolBase.ToolBase):
                 all_grouped.update(g.solved_packages[arch])
 
         for p in tocheck - all_grouped:
-            self.logger.warn('package %s has supplements but is not grouped', p)
+            self.logger.warning('package %s has supplements but is not grouped', p)
 
         for p in tocheck_locales - all_grouped:
-            self.logger.warn('package %s provides supported locale but is not grouped', p)
+            self.logger.warning('package %s provides supported locale but is not grouped', p)
 
     def _prepare_pool(self, arch):
         pool = solv.Pool()
diff --git a/repo_checker.py b/repo_checker.py
index fd3987a9..c5a1b8e8 100755
--- a/repo_checker.py
+++ b/repo_checker.py
@@ -156,7 +156,7 @@ class RepoChecker(ReviewBot.ReviewBot):
         filename = self.project_pseudometa_file_name(project, repository)
         content = project_pseudometa_file_load(self.apiurl, project, filename)
         if not content:
-            self.logger.warn('no project_only run from which to extract existing problems')
+            self.logger.warning('no project_only run from which to extract existing problems')
             return binaries
 
         sections = self.install_check_parse(content)
@@ -210,7 +210,7 @@ class RepoChecker(ReviewBot.ReviewBot):
         if p.returncode:
             self.logger.info('install check: failed')
             if p.returncode == 126:
-                self.logger.warn('mirror cache reset due to corruption')
+                self.logger.warning('mirror cache reset due to corruption')
                 self._invalidate_all()
         elif parse:
             # Parse output for later consumption for posting comments.
@@ -589,11 +589,11 @@ class RepoChecker(ReviewBot.ReviewBot):
             what_depends_on.remove(action.tgt_package)
 
         if len(what_depends_on):
-            self.logger.warn('{} is still a build requirement of:\n\n- {}'.format(
+            self.logger.warning('{} is still a build requirement of:\n\n- {}'.format(
                 action.tgt_package, '\n- '.join(sorted(what_depends_on))))
 
         if len(runtime_deps):
-            self.logger.warn('{} provides runtime dependencies to:\n\n- {}'.format(
+            self.logger.warning('{} provides runtime dependencies to:\n\n- {}'.format(
                 action.tgt_package, '\n- '.join(runtime_deps)))
 
         if len(self.comment_handler.lines):
diff --git a/staging-installcheck.py b/staging-installcheck.py
index a40eb909..5d9be396 100755
--- a/staging-installcheck.py
+++ b/staging-installcheck.py
@@ -225,13 +225,13 @@ class InstallChecker(object):
             check = self.cycle_check(project, repository, arch)
             if not check.success:
-                self.logger.warn('Cycle check failed')
+                self.logger.warning('Cycle check failed')
                 result_comment.append(check.comment)
                 result = False
 
             check = self.install_check(target_pair, arch, directories, None, whitelist)
             if not check.success:
-                self.logger.warn('Install check failed')
+                self.logger.warning('Install check failed')
                 result_comment.append(check.comment)
                 result = False
 
@@ -240,7 +240,7 @@ class InstallChecker(object):
         else:
            result_comment.insert(0, 'Generated from {}\n'.format(self.gocd_url()))
            self.report_state('failure', self.upload_failure(project, result_comment), project, repository, buildids)
-           self.logger.warn('Not accepting {}'.format(project))
+           self.logger.warning('Not accepting {}'.format(project))
            return False
 
         return result
@@ -345,7 +345,7 @@ class InstallChecker(object):
         filename = self.project_pseudometa_file_name(project, repository)
         content = project_pseudometa_file_load(self.api.apiurl, project, filename)
         if not content:
-            self.logger.warn('no project_only run from which to extract existing problems')
+            self.logger.warning('no project_only run from which to extract existing problems')
            return binaries
 
         sections = self.install_check_parse(content)
@@ -385,7 +385,7 @@ class InstallChecker(object):
         if p.returncode:
             self.logger.info('install check: failed')
             if p.returncode == 126:
-                self.logger.warn('mirror cache reset due to corruption')
+                self.logger.warning('mirror cache reset due to corruption')
                 self._invalidate_all()
         elif parse:
             # Parse output for later consumption for posting comments.
diff --git a/ttm/publisher.py b/ttm/publisher.py
index aa4dd55a..07fd84de 100644
--- a/ttm/publisher.py
+++ b/ttm/publisher.py
@@ -154,7 +154,7 @@ class ToTestPublisher(ToTestManager):
                 notify_connection.close()
                 break
             except pika.exceptions.ConnectionClosed as e:
-                self.logger.warn('Sending AMQP event did not work: %s. Retrying try %s out of %s' % (e, t, tries))
+                self.logger.warning('Sending AMQP event did not work: %s. Retrying try %s out of %s' % (e, t, tries))
         else:
             self.logger.error('Could not send out AMQP event for %s tries, aborting.' % tries)