Another set of low-noise flake8 cleanups
commit d1c4801578
parent ed01f0681d
.flake8
@@ -1,4 +1,4 @@
 [flake8]
 exclude = abichecker, openqa, openqa-maintenance.py
 max-line-length = 100
-ignore = E501,F401,E302,E228,E128,E251,E201,E202,E203,E305,F841,E265,E261,E266,E231,E712,E401,E126,E502,E222,E241,E711,E226,E125,E123,W293,W391,E731,E303,E101,E227,E713,E225,E124,E221,E127,E701,W601,E714,W503
+ignore = E501,F401,E302,E228,E128,E251,E201,E202,E203,E305,F841,E265,E261,E266,E712,E401,E126,E502,E222,E241,E711,E226,E125,E123,W293,W391,E731,E101,E227,E713,E225,E124,E221,E127,E701,E714,W503,E129,E303
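Relative to the old ignore list, two checks are no longer suppressed: E231 (missing whitespace after ',' or ':') and W601 (use of dict.has_key()). E129 is newly ignored instead. Most of the hunks below are mechanical fixes for the two re-enabled checks. A minimal sketch of both patterns, reusing the query dictionary from the next hunk:

    # E231: missing whitespace after ':' in a dict literal
    # flagged:  query = {'view':'revpkgnames'}
    query = {'view': 'revpkgnames'}

    # W601: dict.has_key() is deprecated and gone in Python 3
    # flagged:  if query.has_key('view'): ...
    if 'view' in query:
        print(query['view'])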
@@ -119,7 +119,7 @@ class BiArchTool(ToolBase.ToolBase):
         if self.rdeps is not None:
             return
         self.rdeps = dict()
-        url = self.makeurl(['build', self.project, 'standard', self.arch, '_builddepinfo' ], {'view':'revpkgnames'})
+        url = self.makeurl(['build', self.project, 'standard', self.arch, '_builddepinfo' ], {'view': 'revpkgnames'})
         x = ET.fromstring(self.cached_GET(url))
         for pnode in x.findall('package'):
             name = pnode.get('name')
@@ -141,11 +141,11 @@ class BiArchTool(ToolBase.ToolBase):
     # generator. Yield only packges that got checked in after that
     # point in time.
     def _filter_packages_by_time(self, packages):
-        x = ET.fromstring(self.cached_GET(self.makeurl(['source', self.project, '_product', '_history'], {'limit':'1'})))
+        x = ET.fromstring(self.cached_GET(self.makeurl(['source', self.project, '_product', '_history'], {'limit': '1'})))
         producttime = int(x.find('./revision/time').text)
         for pkg in packages:
             try:
-                x = ET.fromstring(self.cached_GET(self.makeurl(['source', self.project, pkg, '_history'], {'rev':'1'})))
+                x = ET.fromstring(self.cached_GET(self.makeurl(['source', self.project, pkg, '_history'], {'rev': '1'})))
             # catch deleted packages
             except urllib2.HTTPError as e:
                 if e.code == 404:
@@ -43,7 +43,7 @@ URL="https://build.opensuse.org/project/status?&project=%s&ignore_pending=true&l
 FACTORY='openSUSE:Factory'

 class RemindedPackage(object):
-    def __init__(self,firstfail,reminded,remindCount,bug):
+    def __init__(self, firstfail, reminded, remindCount, bug):
         self.firstfail=firstfail
         self.reminded=reminded
         self.bug=bug
@@ -190,7 +190,7 @@ def main(args):
         msg['Date'] = email.utils.formatdate()
         msg['Message-ID'] = email.utils.make_msgid()
         msg.add_header('Precedence', 'bulk')
-        msg.add_header('X-Mailer','%s - Failure Notification' % project)
+        msg.add_header('X-Mailer', '%s - Failure Notification' % project)
         logger.info("%s: %s", msg['To'], msg['Subject'])
         if args.dry:
             logger.debug(msg.as_string())
@@ -50,14 +50,14 @@ class CompareList(object):
     def get_source_packages(self, project):
         """Return the list of packages in a project."""
         query = {'expand': 1}
-        root = ET.parse(http_GET(makeurl(self.apiurl,['source', project],
+        root = ET.parse(http_GET(makeurl(self.apiurl, ['source', project],
                                          query=query))).getroot()
         packages = [i.get('name') for i in root.findall('entry')]

         return packages

     def is_linked_package(self, project, package):
-        u = makeurl(self.apiurl,['source', project, package])
+        u = makeurl(self.apiurl, ['source', project, package])
         root = ET.parse(http_GET(u)).getroot()
         linked = root.find('linkinfo')
         return linked
@@ -68,7 +68,7 @@ class CompareList(object):
                  'view': 'xml',
                  'oproject': old_prj,
                  'opackage': package}
-        u = makeurl(self.apiurl,['source', new_prj, package], query=query)
+        u = makeurl(self.apiurl, ['source', new_prj, package], query=query)
         root = ET.parse(http_POST(u)).getroot()
         old_srcmd5 = root.findall('old')[0].get('srcmd5')
         logging.debug('%s old srcmd5 %s in %s' % (package, old_srcmd5, old_prj))
@@ -44,7 +44,7 @@ config_defaults = {
         'url' : "http://download.opensuse.org/tumbleweed/iso/",
         'iso' : "openSUSE-Tumbleweed-DVD-x86_64-Current.iso",
         'name' : 'factory-announcer',
-        'subject' :'New Tumbleweed snapshot {version} released!',
+        'subject' : 'New Tumbleweed snapshot {version} released!',
         'changesfile' : "Changes.{version}.txt",
         'bodytemplate' : """
 Please note that this mail was generated by a script.
@@ -49,7 +49,7 @@ def list():
     for i in sorted(os.listdir(_dir), reverse=True):
         if not digits_re.match(i):
             continue
-        ret = ret + '<a href="diff/%s">%s</a>'%(i,i)
+        ret = ret + '<a href="diff/%s">%s</a>'%(i, i)
         if i == current:
             ret = ret + " <--"
         ret = ret + '<br/>'
@@ -67,7 +67,7 @@ def current():
         return "malformed version", 400
     if not os.path.exists(os.path.join(_dir, version)):
         return "invalid version", 400
-    tmpfn = os.path.join(_dir,'.'+version)
+    tmpfn = os.path.join(_dir, '.'+version)
     app.logger.debug(tmpfn)
     if os.path.exists(tmpfn):
         os.unlink(tmpfn)
@@ -157,7 +157,7 @@ class FccSubmitter(object):
     def get_source_packages(self, project, expand=False):
         """Return the list of packages in a project."""
         query = {'expand': 1} if expand else {}
-        root = ET.parse(http_GET(makeurl(self.apiurl,['source', project],
+        root = ET.parse(http_GET(makeurl(self.apiurl, ['source', project],
                                          query=query))).getroot()
         packages = [i.get('name') for i in root.findall('entry')]

@@ -168,7 +168,7 @@ class FccSubmitter(object):

     def get_link(self, project, package):
         try:
-            link = http_GET(makeurl(self.apiurl,['source', project, package, '_link'])).read()
+            link = http_GET(makeurl(self.apiurl, ['source', project, package, '_link'])).read()
         except (urllib2.HTTPError, urllib2.URLError):
             return None
         return ET.fromstring(link)
@@ -90,7 +90,7 @@ class Leaper(ReviewBot.ReviewBot):
         """Return the list of packages in a project."""
         query = {'expand': 1} if expand else {}
         try:
-            root = ET.parse(osc.core.http_GET(osc.core.makeurl(self.apiurl,['source', project],
+            root = ET.parse(osc.core.http_GET(osc.core.makeurl(self.apiurl, ['source', project],
                                                                query=query))).getroot()
             packages = [i.get('name') for i in root.findall('entry')]
         except urllib2.HTTPError as e:
@@ -19,8 +19,9 @@ from osclib.conf import Config
 from osclib.stagingapi import StagingAPI

 # Duplicate Leap config to handle 13.2 without issue.
-osclib.conf.DEFAULT[r'openSUSE:(?P<project>[\d.]+)'] = \
-    osclib.conf.DEFAULT[r'openSUSE:(?P<project>Leap:[\d.]+)']
+osclib.conf.DEFAULT[
+    r'openSUSE:(?P<project>[\d.]+)'] = osclib.conf.DEFAULT[
+    r'openSUSE:(?P<project>Leap:[\d.]+)']

 # Provide osc.core.get_request_list() that swaps out search() implementation and
 # uses lxml ET to avoid having to reparse to peform complex xpaths.
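Besides the whitespace fixes, this hunk replaces a backslash line continuation with implicit continuation inside the subscript brackets, which is the style PEP 8 recommends for wrapping long lines. A small sketch of the same pattern, with a hypothetical mapping standing in for osclib.conf.DEFAULT:

    # Hypothetical stand-in for osclib.conf.DEFAULT
    DEFAULT = {r'openSUSE:(?P<project>Leap:[\d.]+)': {'example': True}}

    # Old form: backslash continuation
    # DEFAULT[r'openSUSE:(?P<project>[\d.]+)'] = \
    #     DEFAULT[r'openSUSE:(?P<project>Leap:[\d.]+)']

    # New form: the open bracket carries the line break
    DEFAULT[
        r'openSUSE:(?P<project>[\d.]+)'] = DEFAULT[
        r'openSUSE:(?P<project>Leap:[\d.]+)']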
@@ -27,10 +27,10 @@ def _checker_check_dups(self, project, opts):
             continue
         # print(id)
         # ET.dump(target)
-        if not target.attrib.has_key('package'):
+        if 'package' not in target.attrib:
             continue
         package = target.attrib['package']
-        if rqs.has_key(type + package):
+        if type + package in rqs:
             [oldid, oldsource] = rqs[type + package]
             if oldid > id:
                 s = oldid
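The two rewritten conditions are exactly what the re-enabled W601 check reports: dict.has_key() exists only on Python 2, while the in operator works on Python 2 and 3 alike. A minimal, self-contained sketch with hypothetical request data (the key and values below are made up for illustration):

    rqs = {'submitpython-foo': [1234, 'home:someuser']}  # hypothetical request map
    key = 'submit' + 'python-foo'

    # Python 2 only, flagged by W601:
    #     if rqs.has_key(key):
    if key in rqs:
        oldid, oldsource = rqs[key]
        print(oldid, oldsource)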
@@ -176,7 +176,7 @@ class Cache(object):
         data = StringIO(text)

         if conf.config['debug']: print('CACHE_PUT', url, project, file=sys.stderr)
-        f = open(path,'w')
+        f = open(path, 'w')
         f.write(text)
         f.close()

@@ -67,7 +67,7 @@ class StagingHelper(object):

     def get_project_binarylist(self, project, repository, arch):
         query = {'view': 'binarylist', 'repository': repository, 'arch': arch}
-        root = ET.parse(http_GET(makeurl(self.apiurl,['build', project, '_result'],
+        root = ET.parse(http_GET(makeurl(self.apiurl, ['build', project, '_result'],
                                          query=query))).getroot()
         return root

@@ -11,9 +11,9 @@ import re

 results = []
 repo = ""
-architectures = ["x86_64","i586"]
+architectures = ["x86_64", "i586"]
 pkg = ""
-projects = ['openSUSE:Factory','openSUSE:Factory:Rebuild']
+projects = ['openSUSE:Factory', 'openSUSE:Factory:Rebuild']

 #initialize osc config
 osc.conf.get_config()
@@ -28,10 +28,10 @@ import urlparse
 import sys
 import re
 from osclib.cache import Cache
-sys.path.append(".")

 from check_tags_in_requests import TagChecker

+sys.path.append(".")

 APIURL = 'https://maintenancetest.example.com'
 FIXTURES = os.path.join(os.getcwd(), 'tests/fixtures')
@@ -208,7 +208,7 @@ Pico text editor while also offering a few enhancements.</description>
                                body=self._request_withhistory)

         httpretty.register_uri(httpretty.GET,
-                               re.compile (re.escape(APIURL + "/search/request?")),
+                               re.compile(re.escape(APIURL + "/search/request?")),
                                match_querystring=True,
                                body='<collection matches="0"></collection>')

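This hunk fixes yet another pycodestyle check, E211: the space between re.compile and its argument list counts as whitespace before a parenthesis. A minimal sketch:

    import re

    # flagged by E211:
    #     pattern = re.compile (r'^\d+$')
    pattern = re.compile(r'^\d+$')
    print(bool(pattern.match('42')))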
@@ -172,7 +172,7 @@ class UpdateCrawler(object):
             srcrev = a.src_rev
             # sometimes requests only contain the decimal revision
             if re.match(r'^\d+$', srcrev) is not None:
-                xml = ET.fromstring(self._get_source_package(src_project,src_package, srcrev))
+                xml = ET.fromstring(self._get_source_package(src_project, src_package, srcrev))
                 srcrev = xml.get('verifymd5')
             logging.debug('rev {}'.format(srcrev))
             if srcrev == rev:
@@ -351,7 +351,6 @@ def main(args):
     logging.debug("skipped packages: %s", pformat(uc.skipped))

-

 if __name__ == '__main__':
     description = 'Create update SRs for Leap.'
     parser = argparse.ArgumentParser(description=description)