Another set of low-noise flake8 cleanups

This commit is contained in:
Dirk Mueller 2017-10-20 08:54:37 +02:00
parent ed01f0681d
commit d1c4801578
15 changed files with 29 additions and 29 deletions

View File

@ -1,4 +1,4 @@
[flake8] [flake8]
exclude = abichecker, openqa, openqa-maintenance.py exclude = abichecker, openqa, openqa-maintenance.py
max-line-length = 100 max-line-length = 100
ignore = E501,F401,E302,E228,E128,E251,E201,E202,E203,E305,F841,E265,E261,E266,E231,E712,E401,E126,E502,E222,E241,E711,E226,E125,E123,W293,W391,E731,E303,E101,E227,E713,E225,E124,E221,E127,E701,W601,E714,W503 ignore = E501,F401,E302,E228,E128,E251,E201,E202,E203,E305,F841,E265,E261,E266,E712,E401,E126,E502,E222,E241,E711,E226,E125,E123,W293,W391,E731,E101,E227,E713,E225,E124,E221,E127,E701,E714,W503,E129,E303

View File

@ -119,7 +119,7 @@ class BiArchTool(ToolBase.ToolBase):
if self.rdeps is not None: if self.rdeps is not None:
return return
self.rdeps = dict() self.rdeps = dict()
url = self.makeurl(['build', self.project, 'standard', self.arch, '_builddepinfo' ], {'view':'revpkgnames'}) url = self.makeurl(['build', self.project, 'standard', self.arch, '_builddepinfo' ], {'view': 'revpkgnames'})
x = ET.fromstring(self.cached_GET(url)) x = ET.fromstring(self.cached_GET(url))
for pnode in x.findall('package'): for pnode in x.findall('package'):
name = pnode.get('name') name = pnode.get('name')
@ -141,11 +141,11 @@ class BiArchTool(ToolBase.ToolBase):
# generator. Yield only packages that got checked in after that # generator. Yield only packages that got checked in after that
# point in time. # point in time.
def _filter_packages_by_time(self, packages): def _filter_packages_by_time(self, packages):
x = ET.fromstring(self.cached_GET(self.makeurl(['source', self.project, '_product', '_history'], {'limit':'1'}))) x = ET.fromstring(self.cached_GET(self.makeurl(['source', self.project, '_product', '_history'], {'limit': '1'})))
producttime = int(x.find('./revision/time').text) producttime = int(x.find('./revision/time').text)
for pkg in packages: for pkg in packages:
try: try:
x = ET.fromstring(self.cached_GET(self.makeurl(['source', self.project, pkg, '_history'], {'rev':'1'}))) x = ET.fromstring(self.cached_GET(self.makeurl(['source', self.project, pkg, '_history'], {'rev': '1'})))
# catch deleted packages # catch deleted packages
except urllib2.HTTPError as e: except urllib2.HTTPError as e:
if e.code == 404: if e.code == 404:

View File

@ -43,7 +43,7 @@ URL="https://build.opensuse.org/project/status?&project=%s&ignore_pending=true&l
FACTORY='openSUSE:Factory' FACTORY='openSUSE:Factory'
class RemindedPackage(object): class RemindedPackage(object):
def __init__(self,firstfail,reminded,remindCount,bug): def __init__(self, firstfail, reminded, remindCount, bug):
self.firstfail=firstfail self.firstfail=firstfail
self.reminded=reminded self.reminded=reminded
self.bug=bug self.bug=bug
@ -190,7 +190,7 @@ def main(args):
msg['Date'] = email.utils.formatdate() msg['Date'] = email.utils.formatdate()
msg['Message-ID'] = email.utils.make_msgid() msg['Message-ID'] = email.utils.make_msgid()
msg.add_header('Precedence', 'bulk') msg.add_header('Precedence', 'bulk')
msg.add_header('X-Mailer','%s - Failure Notification' % project) msg.add_header('X-Mailer', '%s - Failure Notification' % project)
logger.info("%s: %s", msg['To'], msg['Subject']) logger.info("%s: %s", msg['To'], msg['Subject'])
if args.dry: if args.dry:
logger.debug(msg.as_string()) logger.debug(msg.as_string())

View File

@ -50,14 +50,14 @@ class CompareList(object):
def get_source_packages(self, project): def get_source_packages(self, project):
"""Return the list of packages in a project.""" """Return the list of packages in a project."""
query = {'expand': 1} query = {'expand': 1}
root = ET.parse(http_GET(makeurl(self.apiurl,['source', project], root = ET.parse(http_GET(makeurl(self.apiurl, ['source', project],
query=query))).getroot() query=query))).getroot()
packages = [i.get('name') for i in root.findall('entry')] packages = [i.get('name') for i in root.findall('entry')]
return packages return packages
def is_linked_package(self, project, package): def is_linked_package(self, project, package):
u = makeurl(self.apiurl,['source', project, package]) u = makeurl(self.apiurl, ['source', project, package])
root = ET.parse(http_GET(u)).getroot() root = ET.parse(http_GET(u)).getroot()
linked = root.find('linkinfo') linked = root.find('linkinfo')
return linked return linked
@ -68,7 +68,7 @@ class CompareList(object):
'view': 'xml', 'view': 'xml',
'oproject': old_prj, 'oproject': old_prj,
'opackage': package} 'opackage': package}
u = makeurl(self.apiurl,['source', new_prj, package], query=query) u = makeurl(self.apiurl, ['source', new_prj, package], query=query)
root = ET.parse(http_POST(u)).getroot() root = ET.parse(http_POST(u)).getroot()
old_srcmd5 = root.findall('old')[0].get('srcmd5') old_srcmd5 = root.findall('old')[0].get('srcmd5')
logging.debug('%s old srcmd5 %s in %s' % (package, old_srcmd5, old_prj)) logging.debug('%s old srcmd5 %s in %s' % (package, old_srcmd5, old_prj))

View File

@ -44,7 +44,7 @@ config_defaults = {
'url' : "http://download.opensuse.org/tumbleweed/iso/", 'url' : "http://download.opensuse.org/tumbleweed/iso/",
'iso' : "openSUSE-Tumbleweed-DVD-x86_64-Current.iso", 'iso' : "openSUSE-Tumbleweed-DVD-x86_64-Current.iso",
'name' : 'factory-announcer', 'name' : 'factory-announcer',
'subject' :'New Tumbleweed snapshot {version} released!', 'subject' : 'New Tumbleweed snapshot {version} released!',
'changesfile' : "Changes.{version}.txt", 'changesfile' : "Changes.{version}.txt",
'bodytemplate' : """ 'bodytemplate' : """
Please note that this mail was generated by a script. Please note that this mail was generated by a script.

View File

@ -49,7 +49,7 @@ def list():
for i in sorted(os.listdir(_dir), reverse=True): for i in sorted(os.listdir(_dir), reverse=True):
if not digits_re.match(i): if not digits_re.match(i):
continue continue
ret = ret + '<a href="diff/%s">%s</a>'%(i,i) ret = ret + '<a href="diff/%s">%s</a>'%(i, i)
if i == current: if i == current:
ret = ret + " &lt;--" ret = ret + " &lt;--"
ret = ret + '<br/>' ret = ret + '<br/>'
@ -67,7 +67,7 @@ def current():
return "malformed version", 400 return "malformed version", 400
if not os.path.exists(os.path.join(_dir, version)): if not os.path.exists(os.path.join(_dir, version)):
return "invalid version", 400 return "invalid version", 400
tmpfn = os.path.join(_dir,'.'+version) tmpfn = os.path.join(_dir, '.'+version)
app.logger.debug(tmpfn) app.logger.debug(tmpfn)
if os.path.exists(tmpfn): if os.path.exists(tmpfn):
os.unlink(tmpfn) os.unlink(tmpfn)

View File

@ -157,7 +157,7 @@ class FccSubmitter(object):
def get_source_packages(self, project, expand=False): def get_source_packages(self, project, expand=False):
"""Return the list of packages in a project.""" """Return the list of packages in a project."""
query = {'expand': 1} if expand else {} query = {'expand': 1} if expand else {}
root = ET.parse(http_GET(makeurl(self.apiurl,['source', project], root = ET.parse(http_GET(makeurl(self.apiurl, ['source', project],
query=query))).getroot() query=query))).getroot()
packages = [i.get('name') for i in root.findall('entry')] packages = [i.get('name') for i in root.findall('entry')]
@ -168,7 +168,7 @@ class FccSubmitter(object):
def get_link(self, project, package): def get_link(self, project, package):
try: try:
link = http_GET(makeurl(self.apiurl,['source', project, package, '_link'])).read() link = http_GET(makeurl(self.apiurl, ['source', project, package, '_link'])).read()
except (urllib2.HTTPError, urllib2.URLError): except (urllib2.HTTPError, urllib2.URLError):
return None return None
return ET.fromstring(link) return ET.fromstring(link)

View File

@ -90,7 +90,7 @@ class Leaper(ReviewBot.ReviewBot):
"""Return the list of packages in a project.""" """Return the list of packages in a project."""
query = {'expand': 1} if expand else {} query = {'expand': 1} if expand else {}
try: try:
root = ET.parse(osc.core.http_GET(osc.core.makeurl(self.apiurl,['source', project], root = ET.parse(osc.core.http_GET(osc.core.makeurl(self.apiurl, ['source', project],
query=query))).getroot() query=query))).getroot()
packages = [i.get('name') for i in root.findall('entry')] packages = [i.get('name') for i in root.findall('entry')]
except urllib2.HTTPError as e: except urllib2.HTTPError as e:

View File

@ -19,8 +19,9 @@ from osclib.conf import Config
from osclib.stagingapi import StagingAPI from osclib.stagingapi import StagingAPI
# Duplicate Leap config to handle 13.2 without issue. # Duplicate Leap config to handle 13.2 without issue.
osclib.conf.DEFAULT[r'openSUSE:(?P<project>[\d.]+)'] = \ osclib.conf.DEFAULT[
osclib.conf.DEFAULT[r'openSUSE:(?P<project>Leap:[\d.]+)'] r'openSUSE:(?P<project>[\d.]+)'] = osclib.conf.DEFAULT[
r'openSUSE:(?P<project>Leap:[\d.]+)']
# Provide osc.core.get_request_list() that swaps out search() implementation and # Provide osc.core.get_request_list() that swaps out search() implementation and
# uses lxml ET to avoid having to reparse to perform complex xpaths. # uses lxml ET to avoid having to reparse to perform complex xpaths.

View File

@ -27,10 +27,10 @@ def _checker_check_dups(self, project, opts):
continue continue
# print(id) # print(id)
# ET.dump(target) # ET.dump(target)
if not target.attrib.has_key('package'): if 'package' not in target.attrib:
continue continue
package = target.attrib['package'] package = target.attrib['package']
if rqs.has_key(type + package): if type + package in rqs:
[oldid, oldsource] = rqs[type + package] [oldid, oldsource] = rqs[type + package]
if oldid > id: if oldid > id:
s = oldid s = oldid

View File

@ -176,7 +176,7 @@ class Cache(object):
data = StringIO(text) data = StringIO(text)
if conf.config['debug']: print('CACHE_PUT', url, project, file=sys.stderr) if conf.config['debug']: print('CACHE_PUT', url, project, file=sys.stderr)
f = open(path,'w') f = open(path, 'w')
f.write(text) f.write(text)
f.close() f.close()

View File

@ -67,7 +67,7 @@ class StagingHelper(object):
def get_project_binarylist(self, project, repository, arch): def get_project_binarylist(self, project, repository, arch):
query = {'view': 'binarylist', 'repository': repository, 'arch': arch} query = {'view': 'binarylist', 'repository': repository, 'arch': arch}
root = ET.parse(http_GET(makeurl(self.apiurl,['build', project, '_result'], root = ET.parse(http_GET(makeurl(self.apiurl, ['build', project, '_result'],
query=query))).getroot() query=query))).getroot()
return root return root

View File

@ -11,9 +11,9 @@ import re
results = [] results = []
repo = "" repo = ""
architectures = ["x86_64","i586"] architectures = ["x86_64", "i586"]
pkg = "" pkg = ""
projects = ['openSUSE:Factory','openSUSE:Factory:Rebuild'] projects = ['openSUSE:Factory', 'openSUSE:Factory:Rebuild']
#initialize osc config #initialize osc config
osc.conf.get_config() osc.conf.get_config()

View File

@ -28,10 +28,10 @@ import urlparse
import sys import sys
import re import re
from osclib.cache import Cache from osclib.cache import Cache
sys.path.append(".")
from check_tags_in_requests import TagChecker from check_tags_in_requests import TagChecker
sys.path.append(".")
APIURL = 'https://maintenancetest.example.com' APIURL = 'https://maintenancetest.example.com'
FIXTURES = os.path.join(os.getcwd(), 'tests/fixtures') FIXTURES = os.path.join(os.getcwd(), 'tests/fixtures')
@ -208,7 +208,7 @@ Pico text editor while also offering a few enhancements.</description>
body=self._request_withhistory) body=self._request_withhistory)
httpretty.register_uri(httpretty.GET, httpretty.register_uri(httpretty.GET,
re.compile (re.escape(APIURL + "/search/request?")), re.compile(re.escape(APIURL + "/search/request?")),
match_querystring=True, match_querystring=True,
body='<collection matches="0"></collection>') body='<collection matches="0"></collection>')

View File

@ -172,7 +172,7 @@ class UpdateCrawler(object):
srcrev = a.src_rev srcrev = a.src_rev
# sometimes requests only contain the decimal revision # sometimes requests only contain the decimal revision
if re.match(r'^\d+$', srcrev) is not None: if re.match(r'^\d+$', srcrev) is not None:
xml = ET.fromstring(self._get_source_package(src_project,src_package, srcrev)) xml = ET.fromstring(self._get_source_package(src_project, src_package, srcrev))
srcrev = xml.get('verifymd5') srcrev = xml.get('verifymd5')
logging.debug('rev {}'.format(srcrev)) logging.debug('rev {}'.format(srcrev))
if srcrev == rev: if srcrev == rev:
@ -351,7 +351,6 @@ def main(args):
logging.debug("skipped packages: %s", pformat(uc.skipped)) logging.debug("skipped packages: %s", pformat(uc.skipped))
if __name__ == '__main__': if __name__ == '__main__':
description = 'Create update SRs for Leap.' description = 'Create update SRs for Leap.'
parser = argparse.ArgumentParser(description=description) parser = argparse.ArgumentParser(description=description)