diff --git a/.flake8 b/.flake8
index 59d5c72e..a386fecd 100644
--- a/.flake8
+++ b/.flake8
@@ -1,4 +1,4 @@
 [flake8]
 exclude = abichecker, openqa, openqa-maintenance.py
 max-line-length = 100
-ignore = E501,F401,E302,E228,E128,E251,E201,E202,E203,E305,F841,E265,E261,E266,E231,E712,E401,E126,E502,E222,E241,E711,E226,E125,E123,W293,W391,E731,E303,E101,E227,E713,E225,E124,E221,E127,E701,W601,E714,W503
+ignore = E501,F401,E302,E228,E128,E251,E201,E202,E203,E305,F841,E265,E261,E266,E712,E401,E126,E502,E222,E241,E711,E226,E125,E123,W293,W391,E731,E101,E227,E713,E225,E124,E221,E127,E701,E714,W503,E129,E303
diff --git a/biarchtool.py b/biarchtool.py
index 1af13f01..b3c774c3 100755
--- a/biarchtool.py
+++ b/biarchtool.py
@@ -119,7 +119,7 @@ class BiArchTool(ToolBase.ToolBase):
         if self.rdeps is not None:
             return
         self.rdeps = dict()
-        url = self.makeurl(['build', self.project, 'standard', self.arch, '_builddepinfo' ], {'view':'revpkgnames'})
+        url = self.makeurl(['build', self.project, 'standard', self.arch, '_builddepinfo' ], {'view': 'revpkgnames'})
         x = ET.fromstring(self.cached_GET(url))
         for pnode in x.findall('package'):
             name = pnode.get('name')
@@ -141,11 +141,11 @@ class BiArchTool(ToolBase.ToolBase):
     # generator. Yield only packges that got checked in after that
     # point in time.
     def _filter_packages_by_time(self, packages):
-        x = ET.fromstring(self.cached_GET(self.makeurl(['source', self.project, '_product', '_history'], {'limit':'1'})))
+        x = ET.fromstring(self.cached_GET(self.makeurl(['source', self.project, '_product', '_history'], {'limit': '1'})))
         producttime = int(x.find('./revision/time').text)
         for pkg in packages:
             try:
-                x = ET.fromstring(self.cached_GET(self.makeurl(['source', self.project, pkg, '_history'], {'rev':'1'})))
+                x = ET.fromstring(self.cached_GET(self.makeurl(['source', self.project, pkg, '_history'], {'rev': '1'})))
             # catch deleted packages
             except urllib2.HTTPError as e:
                 if e.code == 404:
diff --git a/build-fail-reminder.py b/build-fail-reminder.py
index d34eba41..d4c53c89 100755
--- a/build-fail-reminder.py
+++ b/build-fail-reminder.py
@@ -43,7 +43,7 @@ URL="https://build.opensuse.org/project/status?&project=%s&ignore_pending=true&l
 FACTORY='openSUSE:Factory'
 
 class RemindedPackage(object):
-    def __init__(self,firstfail,reminded,remindCount,bug):
+    def __init__(self, firstfail, reminded, remindCount, bug):
         self.firstfail=firstfail
         self.reminded=reminded
         self.bug=bug
@@ -190,7 +190,7 @@ def main(args):
     msg['Date'] = email.utils.formatdate()
     msg['Message-ID'] = email.utils.make_msgid()
     msg.add_header('Precedence', 'bulk')
-    msg.add_header('X-Mailer','%s - Failure Notification' % project)
+    msg.add_header('X-Mailer', '%s - Failure Notification' % project)
     logger.info("%s: %s", msg['To'], msg['Subject'])
     if args.dry:
         logger.debug(msg.as_string())
diff --git a/compare_pkglist.py b/compare_pkglist.py
index b59c71e6..420386da 100755
--- a/compare_pkglist.py
+++ b/compare_pkglist.py
@@ -50,14 +50,14 @@ class CompareList(object):
     def get_source_packages(self, project):
         """Return the list of packages in a project."""
         query = {'expand': 1}
-        root = ET.parse(http_GET(makeurl(self.apiurl,['source', project],
+        root = ET.parse(http_GET(makeurl(self.apiurl, ['source', project],
                                          query=query))).getroot()
         packages = [i.get('name') for i in root.findall('entry')]
-        
+
         return packages
 
     def is_linked_package(self, project, package):
-        u = makeurl(self.apiurl,['source', project, package])
+        u = makeurl(self.apiurl, ['source', project, package])
         root = ET.parse(http_GET(u)).getroot()
         linked = root.find('linkinfo')
         return linked
@@ -68,7 +68,7 @@ class CompareList(object):
                  'view': 'xml',
                  'oproject': old_prj,
                  'opackage': package}
-        u = makeurl(self.apiurl,['source', new_prj, package], query=query)
+        u = makeurl(self.apiurl, ['source', new_prj, package], query=query)
         root = ET.parse(http_POST(u)).getroot()
         old_srcmd5 = root.findall('old')[0].get('srcmd5')
         logging.debug('%s old srcmd5 %s in %s' % (package, old_srcmd5, old_prj))
diff --git a/factory-package-news/announcer.py b/factory-package-news/announcer.py
index 0c7bce82..5dc03969 100755
--- a/factory-package-news/announcer.py
+++ b/factory-package-news/announcer.py
@@ -44,7 +44,7 @@ config_defaults = {
     'url' : "http://download.opensuse.org/tumbleweed/iso/",
     'iso' : "openSUSE-Tumbleweed-DVD-x86_64-Current.iso",
     'name' : 'factory-announcer',
-    'subject' :'New Tumbleweed snapshot {version} released!',
+    'subject' : 'New Tumbleweed snapshot {version} released!',
     'changesfile' : "Changes.{version}.txt",
     'bodytemplate' : """
 Please note that this mail was generated by a script.
diff --git a/factory-package-news/factory-package-news-web.py b/factory-package-news/factory-package-news-web.py
index 58b6c54a..7f910ca8 100755
--- a/factory-package-news/factory-package-news-web.py
+++ b/factory-package-news/factory-package-news-web.py
@@ -49,7 +49,7 @@ def list():
     for i in sorted(os.listdir(_dir), reverse=True):
         if not digits_re.match(i):
             continue
-        ret = ret + '%s'%(i,i)
+        ret = ret + '%s'%(i, i)
         if i == current:
             ret = ret + " <--"
         ret = ret + ''
@@ -67,7 +67,7 @@ def current():
            return "malformed version", 400
        if not os.path.exists(os.path.join(_dir, version)):
            return "invalid version", 400
-        tmpfn = os.path.join(_dir,'.'+version)
+        tmpfn = os.path.join(_dir, '.'+version)
        app.logger.debug(tmpfn)
        if os.path.exists(tmpfn):
            os.unlink(tmpfn)
diff --git a/fcc_submitter.py b/fcc_submitter.py
index 3271bda4..0fd15a9b 100755
--- a/fcc_submitter.py
+++ b/fcc_submitter.py
@@ -157,7 +157,7 @@ class FccSubmitter(object):
     def get_source_packages(self, project, expand=False):
         """Return the list of packages in a project."""
         query = {'expand': 1} if expand else {}
-        root = ET.parse(http_GET(makeurl(self.apiurl,['source', project],
+        root = ET.parse(http_GET(makeurl(self.apiurl, ['source', project],
                                          query=query))).getroot()
         packages = [i.get('name') for i in root.findall('entry')]
 
@@ -168,7 +168,7 @@ class FccSubmitter(object):
 
     def get_link(self, project, package):
         try:
-            link = http_GET(makeurl(self.apiurl,['source', project, package, '_link'])).read()
+            link = http_GET(makeurl(self.apiurl, ['source', project, package, '_link'])).read()
         except (urllib2.HTTPError, urllib2.URLError):
             return None
         return ET.fromstring(link)
diff --git a/leaper.py b/leaper.py
index dc84a44d..118c806a 100755
--- a/leaper.py
+++ b/leaper.py
@@ -90,7 +90,7 @@ class Leaper(ReviewBot.ReviewBot):
         """Return the list of packages in a project."""
         query = {'expand': 1} if expand else {}
         try:
-            root = ET.parse(osc.core.http_GET(osc.core.makeurl(self.apiurl,['source', project],
+            root = ET.parse(osc.core.http_GET(osc.core.makeurl(self.apiurl, ['source', project],
                                                                query=query))).getroot()
             packages = [i.get('name') for i in root.findall('entry')]
         except urllib2.HTTPError as e:
diff --git a/metrics.py b/metrics.py
index 80ceec90..db0c50d7 100755
--- a/metrics.py
+++ b/metrics.py
@@ -19,8 +19,9 @@ from osclib.conf import Config
 from osclib.stagingapi import StagingAPI
 
 # Duplicate Leap config to handle 13.2 without issue.
-osclib.conf.DEFAULT[r'openSUSE:(?P[\d.]+)'] = \
-osclib.conf.DEFAULT[r'openSUSE:(?PLeap:[\d.]+)']
+osclib.conf.DEFAULT[
+    r'openSUSE:(?P[\d.]+)'] = osclib.conf.DEFAULT[
+    r'openSUSE:(?PLeap:[\d.]+)']
 
 # Provide osc.core.get_request_list() that swaps out search() implementation and
 # uses lxml ET to avoid having to reparse to peform complex xpaths.
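Note: most of the hunks above are mechanical fixes for flake8 check E231 (missing whitespace after ',' or ':'), which this change removes from the ignore list in .flake8. A minimal sketch of the rule, using made-up values rather than code from the repository:

    # E231 wants a space after every comma and after a dict colon.
    query_bad = {'view':'revpkgnames', 'limit':'1'}    # flagged by flake8 (E231)
    query_ok = {'view': 'revpkgnames', 'limit': '1'}   # style used throughout this diff

    print(query_bad == query_ok)  # True: the cleanup is purely cosmetic
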
diff --git a/osc-check_dups.py b/osc-check_dups.py
index 1c3e5999..c75115f2 100644
--- a/osc-check_dups.py
+++ b/osc-check_dups.py
@@ -27,10 +27,10 @@ def _checker_check_dups(self, project, opts):
             continue
         # print(id)
         # ET.dump(target)
-        if not target.attrib.has_key('package'):
+        if 'package' not in target.attrib:
             continue
         package = target.attrib['package']
-        if rqs.has_key(type + package):
+        if type + package in rqs:
             [oldid, oldsource] = rqs[type + package]
             if oldid > id:
                 s = oldid
diff --git a/osclib/cache.py b/osclib/cache.py
index 2522ba3a..b6b1171e 100644
--- a/osclib/cache.py
+++ b/osclib/cache.py
@@ -176,7 +176,7 @@ class Cache(object):
             data = StringIO(text)
             if conf.config['debug']:
                 print('CACHE_PUT', url, project, file=sys.stderr)
-            f = open(path,'w')
+            f = open(path, 'w')
             f.write(text)
             f.close()
 
diff --git a/suppkg_rebuild.py b/suppkg_rebuild.py
index 8533c145..53faee37 100755
--- a/suppkg_rebuild.py
+++ b/suppkg_rebuild.py
@@ -67,7 +67,7 @@ class StagingHelper(object):
 
     def get_project_binarylist(self, project, repository, arch):
         query = {'view': 'binarylist', 'repository': repository, 'arch': arch}
-        root = ET.parse(http_GET(makeurl(self.apiurl,['build', project, '_result'],
+        root = ET.parse(http_GET(makeurl(self.apiurl, ['build', project, '_result'],
                                          query=query))).getroot()
         return root
 
diff --git a/sync-rebuild.py b/sync-rebuild.py
index 8b05f344..14c77f49 100755
--- a/sync-rebuild.py
+++ b/sync-rebuild.py
@@ -11,9 +11,9 @@ import re
 
 results = []
 repo = ""
-architectures = ["x86_64","i586"]
+architectures = ["x86_64", "i586"]
 pkg = ""
-projects = ['openSUSE:Factory','openSUSE:Factory:Rebuild']
+projects = ['openSUSE:Factory', 'openSUSE:Factory:Rebuild']
 
 #initialize osc config
 osc.conf.get_config()
diff --git a/tests/checktags_tests.py b/tests/checktags_tests.py
index eb43bdff..9837a849 100644
--- a/tests/checktags_tests.py
+++ b/tests/checktags_tests.py
@@ -28,10 +28,10 @@ import urlparse
 import sys
 import re
 from osclib.cache import Cache
-sys.path.append(".")
-
 from check_tags_in_requests import TagChecker
 
+sys.path.append(".")
+
 APIURL = 'https://maintenancetest.example.com'
 FIXTURES = os.path.join(os.getcwd(), 'tests/fixtures')
 
@@ -208,7 +208,7 @@ Pico text editor while also offering a few enhancements.
                                body=self._request_withhistory)
 
         httpretty.register_uri(httpretty.GET,
-                               re.compile (re.escape(APIURL + "/search/request?")),
+                               re.compile(re.escape(APIURL + "/search/request?")),
                                match_querystring=True,
                                body='')
 
diff --git a/update_crawler.py b/update_crawler.py
index 9e6dbcea..928dc4e7 100755
--- a/update_crawler.py
+++ b/update_crawler.py
@@ -172,7 +172,7 @@ class UpdateCrawler(object):
             srcrev = a.src_rev
             # sometimes requests only contain the decimal revision
             if re.match(r'^\d+$', srcrev) is not None:
-                xml = ET.fromstring(self._get_source_package(src_project,src_package, srcrev))
+                xml = ET.fromstring(self._get_source_package(src_project, src_package, srcrev))
                 srcrev = xml.get('verifymd5')
             logging.debug('rev {}'.format(srcrev))
             if srcrev == rev:
@@ -351,7 +351,6 @@ def main(args):
 
     logging.debug("skipped packages: %s", pformat(uc.skipped))
 
-
 if __name__ == '__main__':
     description = 'Create update SRs for Leap.'
     parser = argparse.ArgumentParser(description=description)
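Note: the osc-check_dups.py hunk replaces dict.has_key(), which exists only on Python 2 and was removed in Python 3, with the `in` operator; this matches W601 being dropped from the .flake8 ignore list at the top of the diff. A small, self-contained sketch of the equivalence (the dictionary below is invented for illustration, not data from the tool):

    # 'attrib' stands in for the ElementTree attribute dict checked in osc-check_dups.py.
    attrib = {'package': 'osc', 'project': 'openSUSE:Factory'}

    # Python 2 only:  attrib.has_key('package')  ->  True
    # Portable form used by the diff, works on Python 2 and 3:
    print('package' in attrib)      # True
    print('missing' not in attrib)  # True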