Use f-strings where possible

parent 9ec6e64546
commit 730630f06f
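
The rewrite is mechanical throughout: %-interpolation and str.format() calls become f-string literals with the expressions inlined, and format specifiers move behind the colon. A minimal runnable sketch of the pattern (made-up values, not lines from the diff):

    import time

    bot_name = 'ReviewBot'
    code = 404

    # Before: the two older interpolation styles seen on the removed lines.
    old_key = '%s-override-group' % bot_name.lower()
    old_msg = 'non-ok return code: {}'.format(code)

    # After: the expression moves inside the literal.
    assert old_key == f'{bot_name.lower()}-override-group'
    assert old_msg == f'non-ok return code: {code}'

    # Width/format specs carry over after the colon, as in the htmlreport line.
    t = int(time.time())
    assert '%08x' % t == f'{t:08x}'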

ReviewBot.py: 70 changed lines

@@ -121,9 +121,9 @@ class ReviewBot(object):
         self.request_default_return = None
         self.comment_handler = False
         self.override_allow = True
-        self.override_group_key = '{}-override-group'.format(self.bot_name.lower())
+        self.override_group_key = f'{self.bot_name.lower()}-override-group'
         self.request_age_min_default = 0
-        self.request_age_min_key = '{}-request-age-min'.format(self.bot_name.lower())
+        self.request_age_min_key = f'{self.bot_name.lower()}-request-age-min'
         self.lookup = PackageLookup(self.apiurl)

         self.load_config()
@@ -147,7 +147,7 @@ class ReviewBot(object):
             return True
         except HTTPError as e:
             if e.code != 404:
-                self.logger.error('ERROR in URL %s [%s]' % (url, e))
+                self.logger.error(f'ERROR in URL {url} [{e}]')
                 raise
         return False

@@ -172,7 +172,7 @@ class ReviewBot(object):
     def review_mode(self, value: Union[ReviewChoices, str]) -> None:
         val = ReviewChoices(value)
         if val not in self.REVIEW_CHOICES:
-            raise ValueError("invalid review option: %s" % val)
+            raise ValueError(f"invalid review option: {val}")
         self._review_mode = val

     def set_request_ids(self, ids):
@@ -196,7 +196,7 @@ class ReviewBot(object):
         return_value = 0

         for req in self.requests:
-            self.logger.info("checking %s" % req.reqid)
+            self.logger.info(f"checking {req.reqid}")
             self.request = req

             # XXX: this is a hack. Annotating the request with staging_project.
@@ -225,7 +225,7 @@ class ReviewBot(object):
                 good = True

             if good is None:
-                self.logger.info("%s ignored" % req.reqid)
+                self.logger.info(f"{req.reqid} ignored")
             elif good:
                 self._set_review(req, 'accepted')
             elif self.review_mode != ReviewChoices.ACCEPT_ONPASS:
@@ -256,7 +256,7 @@ class ReviewBot(object):
             return None

         for args, who in self.request_commands('override'):
-            message = 'overridden by {}'.format(who)
+            message = f'overridden by {who}'
             override = args[1] if len(args) >= 2 else 'accept'
             if override == 'accept':
                 self.review_messages['accepted'] = message
@@ -285,7 +285,7 @@ class ReviewBot(object):
     def _set_review(self, req, state):
         doit = self.can_accept_review(req.reqid)
         if doit is None:
-            self.logger.info("can't change state, %s does not have the reviewer" % (req.reqid))
+            self.logger.info(f"can't change state, {req.reqid} does not have the reviewer")

         newstate = state

@@ -293,11 +293,11 @@ class ReviewBot(object):
             by_group = self.fallback_group

         msg = self.review_messages[state] if state in self.review_messages else state
-        self.logger.info("%s %s: %s" % (req.reqid, state, msg))
+        self.logger.info(f"{req.reqid} {state}: {msg}")

         if state == 'declined':
             if self.review_mode == ReviewChoices.FALLBACK_ONFAIL:
-                self.logger.info("%s needs fallback reviewer" % req.reqid)
+                self.logger.info(f"{req.reqid} needs fallback reviewer")
                 self.add_review(req, by_group=by_group, by_user=by_user,
                                 msg="Automated review failed. Needs fallback reviewer.")
                 newstate = 'accepted'
@@ -306,9 +306,9 @@ class ReviewBot(object):

         if doit:
             if self.dryrun:
-                self.logger.info("(dryrun) would set %s to %s with message %s" % (req.reqid, state, msg))
+                self.logger.info(f"(dryrun) would set {req.reqid} to {state} with message {msg}")
             else:
-                self.logger.debug("setting %s to %s" % (req.reqid, state))
+                self.logger.debug(f"setting {req.reqid} to {state}")
                 try:
                     osc.core.change_review_state(apiurl=self.apiurl,
                                                  reqid=req.reqid, newstate=newstate,
@@ -319,7 +319,7 @@ class ReviewBot(object):
                         raise e
                     self.logger.info('unable to change review state (likely superseded or revoked)')
         else:
-            self.logger.debug("%s review not changed" % (req.reqid))
+            self.logger.debug(f"{req.reqid} review not changed")

     def _is_duplicate_review(self, review, query, allow_duplicate):
         if review.by_group != query.get('by_group'):
@@ -363,7 +363,7 @@ class ReviewBot(object):

         u = osc.core.makeurl(self.apiurl, ['request', req.reqid], query)
         if self.dryrun:
-            self.logger.info('POST %s' % u)
+            self.logger.info(f'POST {u}')
             return

         if self.multiple_actions:
@@ -376,17 +376,17 @@ class ReviewBot(object):
             if e.code != 403:
                 raise e
             del query['cmd']
-            self.logger.info('unable to add review {} with message: {}'.format(query, msg))
+            self.logger.info(f'unable to add review {query} with message: {msg}')
             return

         code = ET.parse(r).getroot().attrib['code']
         if code != 'ok':
-            raise Exception('non-ok return code: {}'.format(code))
+            raise Exception(f'non-ok return code: {code}')

     def devel_project_review_add(self, request, project, package, message='adding devel project review'):
         devel_project, devel_package = devel_project_fallback(self.apiurl, project, package)
         if not devel_project:
-            self.logger.warning('no devel project found for {}/{}'.format(project, package))
+            self.logger.warning(f'no devel project found for {project}/{package}')
             return False

         self.add_review(request, by_project=devel_project, by_package=devel_package, msg=message)
@@ -541,7 +541,7 @@ class ReviewBot(object):
         # to find the real package name
         (linkprj, linkpkg) = self._get_linktarget(a.src_project, pkgname)
         if linkpkg is None or linkprj is None or linkprj != a.tgt_project:
-            self.logger.warning("%s/%s is not a link to %s" % (a.src_project, pkgname, a.tgt_project))
+            self.logger.warning(f"{a.src_project}/{pkgname} is not a link to {a.tgt_project}")
             return self.check_source_submission(a.src_project, a.src_package, a.src_rev, a.tgt_project, a.tgt_package)
         else:
             pkgname = linkpkg
@@ -555,14 +555,14 @@ class ReviewBot(object):
         # comment_write() is called by another bot wrapping __default().
         self.comment_handler_remove()

-        message = 'unhandled request type {}'.format(a.type)
+        message = f'unhandled request type {a.type}'
         self.logger.info(message)
         self.review_messages['accepted'] += ': ' + message
         return self.request_default_return

     def check_source_submission(self, src_project: str, src_package: str, src_rev: str, target_project: str, target_package: str) -> None:
         """ default implemention does nothing """
-        self.logger.info("%s/%s@%s -> %s/%s" % (src_project, src_package, src_rev, target_project, target_package))
+        self.logger.info(f"{src_project}/{src_package}@{src_rev} -> {target_project}/{target_package}")
         return None

     @staticmethod
@@ -637,17 +637,17 @@ class ReviewBot(object):
             if self.review_group and self._has_open_review_by(root, 'by_group', self.review_group):
                 return True
         except HTTPError as e:
-            print('ERROR in URL %s [%s]' % (url, e))
+            print(f'ERROR in URL {url} [{e}]')
         return False

     def set_request_ids_search_review(self):
         review = None
         if self.review_user:
-            review = "@by_user='%s' and @state='new'" % self.review_user
+            review = f"@by_user='{self.review_user}' and @state='new'"
         if self.review_group:
-            review = osc.core.xpath_join(review, "@by_group='%s' and @state='new'" % self.review_group)
+            review = osc.core.xpath_join(review, f"@by_group='{self.review_group}' and @state='new'")
         url = osc.core.makeurl(self.apiurl, ('search', 'request'), {
-            'match': "state/@name='review' and review[%s]" % review, 'withfullhistory': 1})
+            'match': f"state/@name='review' and review[{review}]", 'withfullhistory': 1})
         root = ET.parse(osc.core.http_GET(url)).getroot()

         self.requests = []
@@ -659,7 +659,7 @@ class ReviewBot(object):

     # also used by openqabot
     def ids_project(self, project, typename):
-        xpath = "(state/@name='review' or state/@name='new') and (action/target/@project='%s' and action/@type='%s')" % (project, typename)
+        xpath = f"(state/@name='review' or state/@name='new') and (action/target/@project='{project}' and action/@type='{typename}')"
         url = osc.core.makeurl(self.apiurl, ('search', 'request'),
                                {'match': xpath,
                                 'withfullhistory': 1})
@@ -732,7 +732,7 @@ class ReviewBot(object):

         if message is None:
             if not len(self.comment_handler.lines):
-                self.logger.debug('skipping empty comment for {}'.format(debug_key))
+                self.logger.debug(f'skipping empty comment for {debug_key}')
                 return
             message = '\n\n'.join(self.comment_handler.lines)

@@ -756,21 +756,21 @@ class ReviewBot(object):

         if self._is_comment_identical(comment, message, identical):
             # Assume same state/result and number of lines in message is duplicate.
-            self.logger.debug('previous comment too similar on {}'.format(debug_key))
+            self.logger.debug(f'previous comment too similar on {debug_key}')
             return

         if comment is None:
-            self.logger.debug('broadening search to include any state on {}'.format(debug_key))
+            self.logger.debug(f'broadening search to include any state on {debug_key}')
             comment, _ = self.comment_api.comment_find(comments, bot_name)
         if comment is not None:
-            self.logger.debug('removing previous comment on {}'.format(debug_key))
+            self.logger.debug(f'removing previous comment on {debug_key}')
             if not self.dryrun:
                 self.comment_api.delete(comment['id'])
         elif only_replace:
-            self.logger.debug('no previous comment to replace on {}'.format(debug_key))
+            self.logger.debug(f'no previous comment to replace on {debug_key}')
             return

-        self.logger.debug('adding comment to {}: {}'.format(debug_key, message))
+        self.logger.debug(f'adding comment to {debug_key}: {message}')
         if not self.dryrun:
             self.comment_api.add_comment(comment=message, **kwargs)

@@ -787,7 +787,7 @@ class ReviewBot(object):

     def _check_matching_srcmd5(self, project, package, rev, history_limit=5):
         """check if factory sources contain the package and revision. check head and history"""
-        self.logger.debug("checking %s in %s" % (package, project))
+        self.logger.debug(f"checking {package} in {project}")
         try:
             osc.core.show_package_meta(self.apiurl, project, package)
         except (HTTPError, URLError):
@@ -816,9 +816,9 @@ class ReviewBot(object):
             node = revision.find('srcmd5')
             if node is None:
                 continue
-            self.logger.debug("checking %s" % node.text)
+            self.logger.debug(f"checking {node.text}")
             if node.text == rev:
-                self.logger.debug("got it, rev %s" % revision.get('rev'))
+                self.logger.debug(f"got it, rev {revision.get('rev')}")
                 return True
             if i == history_limit:
                 break
@@ -995,7 +995,7 @@ class CommandLineInterface(cmdln.Cmdln):
                 except ExTimeout:
                     pass
                 signal.alarm(0)
-                self.logger.info("recheck at %s" % datetime.datetime.now().isoformat())
+                self.logger.info(f"recheck at {datetime.datetime.now().isoformat()}")
             else:
                 self.logger.info("sleeping %d minutes." % interval)
                 time.sleep(interval * 60)

@@ -51,7 +51,7 @@ class ToolBase(object):
             return http_GET(url)
         except HTTPError as e:
             if 500 <= e.code <= 599:
-                print('Retrying {}'.format(url))
+                print(f'Retrying {url}')
                 time.sleep(1)
                 return self.retried_GET(url)
             logging.error('%s: %s', e, url)
@@ -60,7 +60,7 @@ class ToolBase(object):
             logging.error('%s: "%s - %s" %s', e, e.reason, type(e.reason), url)
             # connection timeout
             if isinstance(e.reason, TimeoutError):
-                print('Retrying {}'.format(url))
+                print(f'Retrying {url}')
                 time.sleep(1)
                 return self.retried_GET(url)
             raise e
@@ -202,7 +202,7 @@ class CommandLineInterface(cmdln.Cmdln):
                 except ExTimeout:
                     pass
                 signal.alarm(0)
-                logger.info("recheck at %s" % datetime.datetime.now().isoformat())
+                logger.info(f"recheck at {datetime.datetime.now().isoformat()}")
                 continue
             break

@@ -40,9 +40,9 @@ class BoilderPlate(cmdln.Cmdln):
         """

         for req in self.session.query(DB.Request).all():
-            print('%s %s'%(req.id, req.state))
+            print(f'{req.id} {req.state}')
             for a in req.abichecks:
-                print(' %s %s %s'%(a.dst_project, a.dst_package, a.result))
+                print(f' {a.dst_project} {a.dst_package} {a.result}')
                 for r in a.reports:
                     print(' %s %10s %-25s %s'%(r.id, r.arch, r.dst_lib, r.result))

@@ -98,7 +98,7 @@ class BoilderPlate(cmdln.Cmdln):

         request = self.session.query(DB.Request).filter(DB.Request.id == request_id).one()
         logentry = DB.Log(request_id = request_id,
-            line = 'manually setting state to seen. previous state: %s (%s)'%(request.state, request.result))
+            line = f'manually setting state to seen. previous state: {request.state} ({request.result})')
         request.state = 'seen'
         request.result = None
         self.session.add(logentry)

@@ -78,7 +78,7 @@ LibResult = namedtuple('LibResult', ('src_repo', 'src_lib', 'dst_repo', 'dst_lib
 class DistUrlMismatch(Exception):
     def __init__(self, disturl, md5):
         Exception.__init__(self)
-        self.msg = 'disturl mismatch has: %s wanted ...%s'%(disturl, md5)
+        self.msg = f'disturl mismatch has: {disturl} wanted ...{md5}'

     def __str__(self):
         return self.msg
@@ -87,7 +87,7 @@ class DistUrlMismatch(Exception):
 class SourceBroken(Exception):
     def __init__(self, project, package):
         Exception.__init__(self)
-        self.msg = '%s/%s has broken sources, needs rebase'%(project, package)
+        self.msg = f'{project}/{package} has broken sources, needs rebase'

     def __str__(self):
         return self.msg
@@ -96,7 +96,7 @@ class SourceBroken(Exception):
 class NoBuildSuccess(Exception):
     def __init__(self, project, package, md5):
         Exception.__init__(self)
-        self.msg = '%s/%s(%s) had no successful build'%(project, package, md5)
+        self.msg = f'{project}/{package}({md5}) had no successful build'

     def __str__(self):
         return self.msg
@@ -105,7 +105,7 @@ class NoBuildSuccess(Exception):
 class NotReadyYet(Exception):
     def __init__(self, project, package, reason):
         Exception.__init__(self)
-        self.msg = '%s/%s not ready yet: %s'%(project, package, reason)
+        self.msg = f'{project}/{package} not ready yet: {reason}'

     def __str__(self):
         return self.msg
@@ -218,14 +218,14 @@ class ABIChecker(ReviewBot.ReviewBot):
         dst_srcinfo = self.get_sourceinfo(dst_project, dst_package)
         self.logger.debug('dest sourceinfo %s', pformat(dst_srcinfo))
         if dst_srcinfo is None:
-            msg = "%s/%s seems to be a new package, no need to review"%(dst_project, dst_package)
+            msg = f"{dst_project}/{dst_package} seems to be a new package, no need to review"
             self.logger.info(msg)
             self.reports.append(report)
             return True
         src_srcinfo = self.get_sourceinfo(src_project, src_package, src_rev)
         self.logger.debug('src sourceinfo %s', pformat(src_srcinfo))
         if src_srcinfo is None:
-            msg = "%s/%s@%s does not exist!? can't check"%(src_project, src_package, src_rev)
+            msg = f"{src_project}/{src_package}@{src_rev} does not exist!? can't check"
             self.logger.error(msg)
             self.text_summary += msg + "\n"
             self.reports.append(report)
@@ -239,7 +239,7 @@ class ABIChecker(ReviewBot.ReviewBot):
             myrepos = self.findrepos(src_project, src_srcinfo, dst_project, dst_srcinfo)
         except NoBuildSuccess as e:
             self.logger.info(e)
-            self.text_summary += "**Error**: %s\n"%e
+            self.text_summary += f"**Error**: {e}\n"
             self.reports.append(report)
             return False
         except NotReadyYet as e:
@@ -248,12 +248,12 @@ class ABIChecker(ReviewBot.ReviewBot):
             return None
         except SourceBroken as e:
             self.logger.error(e)
-            self.text_summary += "**Error**: %s\n"%e
+            self.text_summary += f"**Error**: {e}\n"
             self.reports.append(report)
             return False

         if not myrepos:
-            self.text_summary += "**Error**: %s does not build against %s, can't check library ABIs\n\n"%(src_project, dst_project)
+            self.text_summary += f"**Error**: {src_project} does not build against {dst_project}, can't check library ABIs\n\n"
             self.logger.info("no matching repos, can't compare")
             self.reports.append(report)
             return False
@@ -272,13 +272,13 @@ class ABIChecker(ReviewBot.ReviewBot):
             if new_repo_map is not None:
                 myrepos = new_repo_map
         except MaintenanceError as e:
-            self.text_summary += "**Error**: %s\n\n"%e
+            self.text_summary += f"**Error**: {e}\n\n"
             self.logger.error('%s', e)
             self.reports.append(report)
             return False
         except NoBuildSuccess as e:
             self.logger.info(e)
-            self.text_summary += "**Error**: %s\n"%e
+            self.text_summary += f"**Error**: {e}\n"
             self.reports.append(report)
             return False
         except NotReadyYet as e:
@@ -287,7 +287,7 @@ class ABIChecker(ReviewBot.ReviewBot):
             return None
         except SourceBroken as e:
             self.logger.error(e)
-            self.text_summary += "**Error**: %s\n"%e
+            self.text_summary += f"**Error**: {e}\n"
             self.reports.append(report)
             return False

@@ -305,7 +305,7 @@ class ABIChecker(ReviewBot.ReviewBot):
                 if dst_libs is None:
                     continue
             except DistUrlMismatch as e:
-                self.logger.error("%s/%s %s/%s: %s"%(dst_project, dst_package, mr.dstrepo, mr.arch, e))
+                self.logger.error(f"{dst_project}/{dst_package} {mr.dstrepo}/{mr.arch}: {e}")
                 if ret == True:  # need to check again
                     ret = None
                 continue
@@ -326,7 +326,7 @@ class ABIChecker(ReviewBot.ReviewBot):
                     self.text_summary += "*Warning*: the submission does not contain any libs anymore\n\n"
                     continue
             except DistUrlMismatch as e:
-                self.logger.error("%s/%s %s/%s: %s"%(src_project, src_package, mr.srcrepo, mr.arch, e))
+                self.logger.error(f"{src_project}/{src_package} {mr.srcrepo}/{mr.arch}: {e}")
                 if ret == True:  # need to check again
                     ret = None
                 continue
@@ -362,7 +362,7 @@ class ABIChecker(ReviewBot.ReviewBot):
                     pairs.add((lib, l))
                     found = True
             if found == False:
-                self.text_summary += "*Warning*: %s no longer packaged\n\n"%lib
+                self.text_summary += f"*Warning*: {lib} no longer packaged\n\n"

         self.logger.debug("to diff: %s", pformat(pairs))

@@ -385,7 +385,7 @@ class ABIChecker(ReviewBot.ReviewBot):

             # we just need that to pass a name to abi checker
             m = so_re.match(old)
-            htmlreport = 'report-%s-%s-%s-%s-%s-%08x.html'%(mr.srcrepo, os.path.basename(old), mr.dstrepo, os.path.basename(new), mr.arch, int(time.time()))
+            htmlreport = f'report-{mr.srcrepo}-{os.path.basename(old)}-{mr.dstrepo}-{os.path.basename(new)}-{mr.arch}-{int(time.time()):08x}.html'

             # run abichecker
             if m \
@@ -401,8 +401,8 @@ class ABIChecker(ReviewBot.ReviewBot):
             elif overall == True and r == False:
                 overall = r
             else:
-                self.logger.error('failed to compare %s <> %s'%(old,new))
-                self.text_summary += "**Error**: ABI check failed on %s vs %s\n\n"%(old, new)
+                self.logger.error(f'failed to compare {old} <> {new}')
+                self.text_summary += f"**Error**: ABI check failed on {old} vs {new}\n\n"
                 if ret == True:  # need to check again
                     ret = None

@@ -429,7 +429,7 @@ class ABIChecker(ReviewBot.ReviewBot):

         # find the maintenance project
         url = osc.core.makeurl(self.apiurl, ('search', 'project', 'id'),
-                               "match=(maintenance/maintains/@project='%s'+and+attribute/@name='%s')"%(dst_project, osc.conf.config['maintenance_attribute']))
+                               f"match=(maintenance/maintains/@project='{dst_project}'+and+attribute/@name='{osc.conf.config['maintenance_attribute']}')")
         root = ET.parse(osc.core.http_GET(url)).getroot()
         if root is not None:
             node = root.find('project')
@@ -448,7 +448,7 @@ class ABIChecker(ReviewBot.ReviewBot):
                     if node.get('code') != 'disabled':
                         alldisabled = False
                 if alldisabled:
-                    self.logger.debug("all repos disabled, using originproject %s"%originproject)
+                    self.logger.debug(f"all repos disabled, using originproject {originproject}")
                 else:
                     originproject = None
         else:
@@ -456,20 +456,20 @@ class ABIChecker(ReviewBot.ReviewBot):
             # packages are only a link to packagename.incidentnr
             (linkprj, linkpkg) = self._get_linktarget(dst_project, pkg)
             if linkpkg is not None and linkprj == dst_project:
-                self.logger.debug("%s/%s links to %s"%(dst_project, pkg, linkpkg))
+                self.logger.debug(f"{dst_project}/{pkg} links to {linkpkg}")
                 regex = re.compile(r'.*\.(\d+)$')
                 m = regex.match(linkpkg)
                 if m is None:
-                    raise MaintenanceError("%s/%s -> %s/%s is not a proper maintenance link (must match /%s/)"%(dst_project, pkg, linkprj, linkpkg, regex.pattern))
+                    raise MaintenanceError(f"{dst_project}/{pkg} -> {linkprj}/{linkpkg} is not a proper maintenance link (must match /{regex.pattern}/)")
                 incident = m.group(1)
-                self.logger.debug("is maintenance incident %s"%incident)
+                self.logger.debug(f"is maintenance incident {incident}")

-                originproject = "%s:%s"%(mproject, incident)
+                originproject = f"{mproject}:{incident}"
                 originpackage = pkg+'.'+dst_project.replace(':', '_')

                 origin_srcinfo = self.get_sourceinfo(originproject, originpackage)
                 if origin_srcinfo is None:
-                    raise MaintenanceError("%s/%s invalid"%(originproject, originpackage))
+                    raise MaintenanceError(f"{originproject}/{originpackage} invalid")

                 # find the map of maintenance incident repos to destination repos
                 originrepos = self.findrepos(originproject, origin_srcinfo, dst_project, dst_srcinfo)
@@ -486,7 +486,7 @@ class ABIChecker(ReviewBot.ReviewBot):
                 # sometimes a previously released maintenance
                 # update didn't cover all architectures. We can
                 # only ignore that then.
-                self.logger.warning("couldn't find repo %s/%s in %s/%s"%(mr.dstrepo, mr.arch, originproject, originpackage))
+                self.logger.warning(f"couldn't find repo {mr.dstrepo}/{mr.arch} in {originproject}/{originpackage}")
                 continue
             matchrepos.add(MR(mr.srcrepo, mapped[(mr.dstrepo, mr.arch)].srcrepo, mr.arch))

@@ -553,7 +553,7 @@ class ABIChecker(ReviewBot.ReviewBot):
         if ret is not None and self.text_summary == '':
             # if for some reason save_reports_to_db didn't produce a
             # summary we add one
-            self.text_summary = "ABI checker result: [%s](%s/request/%s)"%(result, WEB_URL, req.reqid)
+            self.text_summary = f"ABI checker result: [{result}]({WEB_URL}/request/{req.reqid})"

         if commentid and not self.dryrun:
             self.commentapi.delete(commentid)
@@ -613,10 +613,10 @@ class ABIChecker(ReviewBot.ReviewBot):
                 continue
             elif r.result:
                 self.text_summary += "Good news from ABI check, "
-                self.text_summary += "%s seems to be ABI [compatible](%s/request/%s):\n\n"%(r.dst_package, WEB_URL, req.reqid)
+                self.text_summary += f"{r.dst_package} seems to be ABI [compatible]({WEB_URL}/request/{req.reqid}):\n\n"
             else:
                 self.text_summary += "Warning: bad news from ABI check, "
-                self.text_summary += "%s may be ABI [**INCOMPATIBLE**](%s/request/%s):\n\n"%(r.dst_package, WEB_URL, req.reqid)
+                self.text_summary += f"{r.dst_package} may be ABI [**INCOMPATIBLE**]({WEB_URL}/request/{req.reqid}):\n\n"
             for lr in r.reports:
                 libreport = DB.LibReport(
                     abicheck = abicheck,
@@ -643,7 +643,7 @@ class ABIChecker(ReviewBot.ReviewBot):
         msg = "<!-- abichecker state=%s%s -->\n"%(state, ' result=%s'%result if result else '')
         msg += self.text_summary

-        self.logger.info("add comment: %s"%msg)
+        self.logger.info(f"add comment: {msg}")
         if not self.dryrun:
             #self.commentapi.delete_from_where_user(self.review_user, request_id = req.reqid)
             self.commentapi.add_comment(request_id = req.reqid, comment = msg)
@@ -672,7 +672,7 @@ class ABIChecker(ReviewBot.ReviewBot):
         self.logger.debug(cmd)
         r = subprocess.Popen(cmd, close_fds=True, cwd=CACHEDIR).wait()
         if r != 0:
-            self.logger.error("failed to dump %s!"%filename)
+            self.logger.error(f"failed to dump {filename}!")
             # XXX: record error
             return False
         return True
@@ -683,7 +683,7 @@ class ABIChecker(ReviewBot.ReviewBot):
         fetchlist, liblist, debuglist = self.compute_fetchlist(project, package, srcinfo, repo, arch)

         if not fetchlist:
-            msg = "no libraries found in %s/%s %s/%s"%(project, package, repo, arch)
+            msg = f"no libraries found in {project}/{package} {repo}/{arch}"
             self.logger.info(msg)
             return None, None

@@ -703,14 +703,14 @@ class ABIChecker(ReviewBot.ReviewBot):
         # extract binary rpms
         tmpfile = os.path.join(CACHEDIR, "cpio")
         for fn in fetchlist:
-            self.logger.debug("extract %s"%fn)
+            self.logger.debug(f"extract {fn}")
             with open(tmpfile, 'wb') as tmpfd:
                 if fn not in downloaded:
-                    raise FetchError("%s was not downloaded!"%fn)
+                    raise FetchError(f"{fn} was not downloaded!")
                 self.logger.debug(downloaded[fn])
                 r = subprocess.call(['rpm2cpio', downloaded[fn]], stdout=tmpfd, close_fds=True)
                 if r != 0:
-                    raise FetchError("failed to extract %s!"%fn)
+                    raise FetchError(f"failed to extract {fn}!")
             tmpfd.close()
             os.unlink(downloaded[fn])
             cpio = CpioRead(tmpfile)
@@ -745,7 +745,7 @@ class ABIChecker(ReviewBot.ReviewBot):
         downloaded = dict()
         for fn in filenames:
             if fn not in mtimes:
-                raise FetchError("missing mtime information for %s, can't check"% fn)
+                raise FetchError(f"missing mtime information for {fn}, can't check")
             repodir = os.path.join(DOWNLOADS, package, project, repo)
             if not os.path.exists(repodir):
                 os.makedirs(repodir)
@@ -781,7 +781,7 @@ class ABIChecker(ReviewBot.ReviewBot):
         try:
             r = osc.core.http_GET(u)
         except HTTPError as e:
-            raise FetchError('failed to fetch header information: %s'%e)
+            raise FetchError(f'failed to fetch header information: {e}')
         tmpfile = NamedTemporaryFile(prefix="cpio-", delete=False)
         for chunk in r:
             tmpfile.write(chunk)
@@ -799,7 +799,7 @@ class ABIChecker(ReviewBot.ReviewBot):
             fh.seek(ch.dataoff, os.SEEK_SET)
             h = self.readRpmHeaderFD(fh)
             if h is None:
-                raise FetchError("failed to read rpm header for %s"%ch.filename)
+                raise FetchError(f"failed to read rpm header for {ch.filename}")
             m = rpm_re.match(ch.filename.decode('utf-8'))
             if m:
                 yield m.group(1), h
@@ -827,7 +827,7 @@ class ABIChecker(ReviewBot.ReviewBot):
             return ET.parse(osc.core.http_GET(url)).getroot()
         except HTTPError as e:
             if e.code != 404:
-                self.logger.error('ERROR in URL %s [%s]' % (url, e))
+                self.logger.error(f'ERROR in URL {url} [{e}]')
                 raise
             pass
         return None
@@ -890,16 +890,16 @@ class ABIChecker(ReviewBot.ReviewBot):

         for mr in matchrepos:
             if not (mr.srcrepo, mr.arch) in rmap:
-                self.logger.warning("%s/%s had no build success"%(mr.srcrepo, mr.arch))
+                self.logger.warning(f"{mr.srcrepo}/{mr.arch} had no build success")
                 raise NotReadyYet(src_project, src_srcinfo.package, "no result")
             if rmap[(mr.srcrepo, mr.arch)]['dirty']:
-                self.logger.warning("%s/%s dirty"%(mr.srcrepo, mr.arch))
+                self.logger.warning(f"{mr.srcrepo}/{mr.arch} dirty")
                 raise NotReadyYet(src_project, src_srcinfo.package, "dirty")
             code = rmap[(mr.srcrepo, mr.arch)]['code']
             if code == 'broken':
                 raise SourceBroken(src_project, src_srcinfo.package)
             if code != 'succeeded' and code != 'locked' and code != 'excluded':
-                self.logger.warning("%s/%s not succeeded (%s)"%(mr.srcrepo, mr.arch, code))
+                self.logger.warning(f"{mr.srcrepo}/{mr.arch} not succeeded ({code})")
                 raise NotReadyYet(src_project, src_srcinfo.package, code)

     def findrepos(self, src_project, src_srcinfo, dst_project, dst_srcinfo):
@@ -928,7 +928,7 @@ class ABIChecker(ReviewBot.ReviewBot):
             name = repo.attrib['name']
             path = repo.findall('path')
             if path is None or len(path) != 1:
-                self.logger.error("repo %s has more than one path"%name)
+                self.logger.error(f"repo {name} has more than one path")
                 continue
             prj = path[0].attrib['project']
             if prj == 'openSUSE:Tumbleweed':
@@ -959,7 +959,7 @@ class ABIChecker(ReviewBot.ReviewBot):
             raise NoBuildSuccess(src_project, src_srcinfo.package, src_srcinfo.verifymd5)
         for mr in matchrepos:
             if not (mr.srcrepo, arch) in srcrepos:
-                self.logger.error("%s/%s had no build success"%(mr.srcrepo, arch))
+                self.logger.error(f"{mr.srcrepo}/{arch} had no build success")
                 raise NoBuildSuccess(src_project, src_srcinfo.package, src_srcinfo.verifymd5)

         return matchrepos
@@ -990,7 +990,7 @@ class ABIChecker(ReviewBot.ReviewBot):
         """ scan binary rpms of the specified repo for libraries.
         Returns a set of packages to fetch and the libraries found
         """
-        self.logger.debug('scanning %s/%s %s/%s'%(prj, pkg, repo, arch))
+        self.logger.debug(f'scanning {prj}/{pkg} {repo}/{arch}')

         headers = self._fetchcpioheaders(prj, pkg, repo, arch)
         missing_debuginfo = set()
@@ -1017,12 +1017,12 @@ class ABIChecker(ReviewBot.ReviewBot):
                 lnk = lnk.decode('utf-8')
             if so_re.match(fn):
                 if S_ISREG(mode):
-                    self.logger.debug('found lib: %s'%fn)
+                    self.logger.debug(f'found lib: {fn}')
                     lib_packages.setdefault(pkgname, set()).add(fn)
                 elif S_ISLNK(mode) and lnk is not None:
                     alias = os.path.basename(fn)
                     libname = os.path.basename(lnk)
-                    self.logger.debug('found alias: %s -> %s'%(alias, libname))
+                    self.logger.debug(f'found alias: {alias} -> {libname}')
                     lib_aliases.setdefault(libname, set()).add(alias)

         fetchlist = set()
@@ -1040,7 +1040,7 @@ class ABIChecker(ReviewBot.ReviewBot):
             files = set ([f.decode('utf-8') for f in h['filenames']])
             ok = True
             for lib in lib_packages[pkgname]:
-                libdebug = '/usr/lib/debug%s.debug'%lib
+                libdebug = f'/usr/lib/debug{lib}.debug'
                 if libdebug not in files:
                     # some new format that includes version, release and arch in debuginfo?
                     # FIXME: version and release are actually the
@@ -1067,7 +1067,7 @@ class ABIChecker(ReviewBot.ReviewBot):
                     liblist[lib] |= lib_aliases[libname]

         if missing_debuginfo:
-            self.logger.error('missing debuginfo: %s'%pformat(missing_debuginfo))
+            self.logger.error(f'missing debuginfo: {pformat(missing_debuginfo)}')
             raise MissingDebugInfo(missing_debuginfo)

         return fetchlist, liblist, debuglist

@@ -74,7 +74,7 @@ class Config(Base):
     t_updated = Column(DateTime, default=datetime.now, onupdate=datetime.now)

 def db_engine():
-    return create_engine('sqlite:///%s/abi-checker.db'%DATADIR)
+    return create_engine(f'sqlite:///{DATADIR}/abi-checker.db')

 def db_session():
     engine = db_engine()

@@ -108,14 +108,14 @@ class BiArchTool(ToolBase.ToolBase):
         if ':Rings' in self.project:
             self.biarch_packages = set()
         else:
-            self.biarch_packages = set(self.meta_get_packagelist("%s:Rings:0-Bootstrap" % self.project))
-            self.biarch_packages |= set(self.meta_get_packagelist("%s:Rings:1-MinimalX" % self.project))
+            self.biarch_packages = set(self.meta_get_packagelist(f"{self.project}:Rings:0-Bootstrap"))
+            self.biarch_packages |= set(self.meta_get_packagelist(f"{self.project}:Rings:1-MinimalX"))

         self._init_rdeps()
         self.fill_package_meta()

     def fill_package_meta(self):
-        url = self.makeurl(['search', 'package'], "match=[@project='%s']" % self.project)
+        url = self.makeurl(['search', 'package'], f"match=[@project='{self.project}']")
         root = ET.fromstring(self.cached_GET(url))
         for p in root.findall('package'):
             name = p.attrib['name']
@@ -153,7 +153,7 @@ class BiArchTool(ToolBase.ToolBase):

         packages = set()

-        for n in result.findall("./result[@arch='{}']/status".format(self.arch)):
+        for n in result.findall(f"./result[@arch='{self.arch}']/status"):
             if n.get('code') not in ('disabled', 'excluded'):
                 packages.add(n.get('package'))

@@ -167,7 +167,7 @@ class BiArchTool(ToolBase.ToolBase):
             pkgmeta = self.package_metas[pkg]

             for build in pkgmeta.findall("./build"):
-                for n in build.findall("./enable[@arch='{}']".format(self.arch)):
+                for n in build.findall(f"./enable[@arch='{self.arch}']"):
                     logger.debug("disable %s", pkg)
                     build.remove(n)
                     changed = True
@@ -233,9 +233,9 @@ class BiArchTool(ToolBase.ToolBase):
             must_disable = None
             changed = None

-            for n in pkgmeta.findall("./build/enable[@arch='{}']".format(self.arch)):
+            for n in pkgmeta.findall(f"./build/enable[@arch='{self.arch}']"):
                 is_enabled = True
-            for n in pkgmeta.findall("./build/disable[@arch='{}']".format(self.arch)):
+            for n in pkgmeta.findall(f"./build/disable[@arch='{self.arch}']"):
                 is_disabled = True

             if force:
@@ -251,7 +251,7 @@ class BiArchTool(ToolBase.ToolBase):
                 if is_disabled:
                     logger.info('enabling %s for %s', pkg, self.arch)
                     for build in pkgmeta.findall("./build"):
-                        for n in build.findall("./disable[@arch='{}']".format(self.arch)):
+                        for n in build.findall(f"./disable[@arch='{self.arch}']"):
                             build.remove(n)
                             changed = True
                     if not changed:
@@ -272,7 +272,7 @@ class BiArchTool(ToolBase.ToolBase):
                 if is_enabled:
                     logger.info('removing explicit enable %s for %s', pkg, self.arch)
                     for build in pkgmeta.findall("./build"):
-                        for n in build.findall("./enable[@arch='{}']".format(self.arch)):
+                        for n in build.findall(f"./enable[@arch='{self.arch}']"):
                             build.remove(n)
                             changed = True
                     if not changed:
@@ -291,7 +291,7 @@ class BiArchTool(ToolBase.ToolBase):
                 if self.caching:
                     self._invalidate__cached_GET(pkgmetaurl)

-                if wipebinaries and pkgmeta.find("./build/disable[@arch='{}']".format(self.arch)) is not None:
+                if wipebinaries and pkgmeta.find(f"./build/disable[@arch='{self.arch}']") is not None:
                     logger.debug("wiping %s", pkg)
                     self.http_POST(self.makeurl(['build', self.project], {
                         'cmd': 'wipe',
@@ -309,7 +309,7 @@ class CommandLineInterface(ToolBase.CommandLineInterface):
     def get_optparser(self):
         parser = ToolBase.CommandLineInterface.get_optparser(self)
         parser.add_option('-p', '--project', dest='project', metavar='PROJECT',
-                          help='project to process (default: %s)' % FACTORY,
+                          help=f'project to process (default: {FACTORY})',
                           default=FACTORY)
         return parser

bugowner.py: 12 changed lines

@@ -55,7 +55,7 @@ class BugownerTool(ToolBase.ToolBase):
         url = self.makeurl(['person', name])
         root = ET.fromstring(self.cached_GET(url))

-        person = Person(*[root.find('./{}'.format(field)).text for field in Person._fields])
+        person = Person(*[root.find(f'./{field}').text for field in Person._fields])
         self.persons[name] = person

         return person
@@ -76,9 +76,9 @@ class BugownerTool(ToolBase.ToolBase):
         url = self.makeurl(['search', 'owner'], {'binary': package})
         root = ET.fromstring(self.cached_GET(url))
         ret = []
-        for node in root.findall('./owner/person[@role="{}"]'.format(role)):
+        for node in root.findall(f'./owner/person[@role="{role}"]'):
             ret.append(Owner('person', node.get('name')))
-        for node in root.findall('./owner/group[@role="{}"]'.format(role)):
+        for node in root.findall(f'./owner/group[@role="{role}"]'):
             ret.append(Owner('group', node.get('name')))

         return ret
@@ -88,7 +88,7 @@ class BugownerTool(ToolBase.ToolBase):
         root = ET.fromstring(self.cached_GET(url))
         idname = 'userid' if owner.kind == 'person' else 'groupid'
         # XXX: can't use 'and' here to filter for bugowner too
-        exists = root.findall('./{}[@{}="{}"]'.format(owner.kind, idname, owner.name))
+        exists = root.findall(f'./{owner.kind}[@{idname}="{owner.name}"]')
         for node in exists:
             if node.get('role') == 'bugowner':
                 logger.debug("%s/%s already has %s %s", self.project, package, owner.kind, owner.name)
@@ -113,7 +113,7 @@ class BugownerTool(ToolBase.ToolBase):
         user = srcrev['user']

         if self.is_release_manager(user):
-            logging.debug("%s was last touched by %s, ignored." % (package, user))
+            logging.debug(f"{package} was last touched by {user}, ignored.")
             return None

         return [Owner('person', user)]
@@ -138,7 +138,7 @@ class CommandLineInterface(ToolBase.CommandLineInterface):
     def get_optparser(self):
         parser = ToolBase.CommandLineInterface.get_optparser(self)
         parser.add_option('-p', '--project', dest='project', metavar='PROJECT',
-                          help='project to process (default: %s)' % FACTORY,
+                          help=f'project to process (default: {FACTORY})',
                           default=FACTORY)
         parser.add_option('--reference-project', metavar='PROJECT',
                           action='append', help='reference project')

@@ -31,7 +31,7 @@ class RemindedPackage(object):
         self.problem = problem

     def __str__(self):
-        return '{} {} {} {}'.format(self.firstfail, self.reminded, self.remindCount, self.problem)
+        return f'{self.firstfail} {self.reminded} {self.remindCount} {self.problem}'


 def jdefault(o):
@@ -88,14 +88,14 @@ Kind regards,

 def SendMail(logger, project, sender, to, fullname, subject, text):
     try:
-        xmailer = '{} - Problem Notification'.format(project)
+        xmailer = f'{project} - Problem Notification'
         to = email.utils.formataddr((fullname, to))
         mail_send_with_details(sender=sender, to=to,
                                subject=subject, text=text, xmailer=xmailer,
                                relay=args.relay, dry=args.dry)
     except Exception as e:
         print(e)
-        logger.error("Failed to send an email to %s (%s)" % (fullname, to))
+        logger.error(f"Failed to send an email to {fullname} ({to})")


 def check_reminder(pname, first, problem, now, Reminded, RemindedLoaded):
@@ -144,7 +144,7 @@ def main(args):
     global project
     project = args.project

-    logger.debug('loading build fails for %s' % project)
+    logger.debug(f'loading build fails for {project}')
     url = osc.core.makeurl(apiurl, ['source', f'{project}:Staging', 'dashboard', f'rebuildpacs.{project}-standard.yaml'])
     try:
         _data = osc.core.http_GET(url)
@@ -162,7 +162,7 @@ def main(args):

     reminded_json = args.json
     if not reminded_json:
-        reminded_json = '{}.reminded.json'.format(project)
+        reminded_json = f'{project}.reminded.json'

     try:
         with open(reminded_json) as json_data:
@@ -225,7 +225,7 @@ def main(args):
         for userid in maintainers:
             to = Person[userid][2]
             fullname = Person[userid][1]
-            subject = '%s - %s - Build problem notification' % (project, package)
+            subject = f'{project} - {package} - Build problem notification'
             text = MAIL_TEMPLATES[Reminded[package].remindCount - 1] % {
                 'recipient': fullname,
                 'sender': sender,
@@ -250,11 +250,11 @@ def main(args):
         ProjectComplainList.sort()
         to = 'factory@lists.opensuse.org'
         fullname = "openSUSE Factory - Mailing List"
-        subject = "%(project)s - Build fail notification" % {'project': project}
+        subject = f"{project} - Build fail notification"

-        text = u"""Dear Package maintainers and hackers.
+        text = f"""Dear Package maintainers and hackers.

-Below package(s) in %(project)s have had problems for at
+Below package(s) in {project} have had problems for at
 least 4 weeks. We tried to send out notifications to the
 configured bugowner/maintainers of the package(s), but so far no
 fix has been submitted. This probably means that the
@@ -262,16 +262,16 @@ maintainer/bugowner did not yet find the time to look into the
 matter and he/she would certainly appreciate help to get this
 sorted.

-""" % {'project': project}
+"""
         for pkg in ProjectComplainList:
-            text += "- %s: %s\n" % (pkg, Reminded[pkg].problem)
-        text += u"""
+            text += f"- {pkg}: {Reminded[pkg].problem}\n"
+        text += f"""
 Unless somebody is stepping up and submitting fixes, the listed
-package(s) are going to be removed from %(project)s.
+package(s) are going to be removed from {project}.

 Kind regards,
-%(sender)s
-""" % {'project': project, 'sender': sender}
+{sender}
+"""
         SendMail(logger, project, sender, to, fullname, subject, text)

@@ -38,7 +38,7 @@ class MaintenanceChecker(ReviewBot.ReviewBot):
                 if prj.startswith('openSUSE:Leap') or prj.startswith('openSUSE:1'):
                     self.logger.debug("%s looks wrong as maintainer, skipped", prj)
                     continue
-                msg = 'Submission for {} by someone who is not maintainer in the devel project ({}). Please review'.format(pkg, prj)
+                msg = f'Submission for {pkg} by someone who is not maintainer in the devel project ({prj}). Please review'
                 self.add_review(req, by_project=prj, by_package=pkg, msg=msg)

     @staticmethod
@@ -75,16 +75,16 @@ class MaintenanceChecker(ReviewBot.ReviewBot):
         if project.startswith('openSUSE:Leap:') and hasattr(a, 'src_project'):
             mapping = MaintenanceChecker._get_lookup_yml(self.apiurl, project)
             if mapping is None:
-                self.logger.error("error loading mapping for {}".format(project))
+                self.logger.error(f"error loading mapping for {project}")
             elif pkgname not in mapping:
-                self.logger.debug("{} not tracked".format(pkgname))
+                self.logger.debug(f"{pkgname} not tracked")
             else:
                 origin = mapping[pkgname]
-                self.logger.debug("{} comes from {}, submitted from {}".format(pkgname, origin, a.src_project))
+                self.logger.debug(f"{pkgname} comes from {origin}, submitted from {a.src_project}")
                 if origin.startswith('SUSE:SLE-12') and a.src_project.startswith('SUSE:SLE-12') \
                    or origin.startswith('SUSE:SLE-15') and a.src_project.startswith('SUSE:SLE-15') \
                    or origin.startswith('openSUSE:Leap') and a.src_project.startswith('openSUSE:Leap'):
-                    self.logger.info("{} submitted from {}, no maintainer review needed".format(pkgname, a.src_project))
+                    self.logger.info(f"{pkgname} submitted from {a.src_project}, no maintainer review needed")
                     return

         maintainers = set(maintainers_get(self.apiurl, project, pkgname))
@@ -92,18 +92,18 @@ class MaintenanceChecker(ReviewBot.ReviewBot):
         known_maintainer = False
         for m in maintainers:
             if author == m:
-                self.logger.debug("%s is maintainer" % author)
+                self.logger.debug(f"{author} is maintainer")
                 known_maintainer = True
         if not known_maintainer:
             for r in req.reviews:
                 if r.by_user in maintainers:
-                    self.logger.debug("found %s as reviewer" % r.by_user)
+                    self.logger.debug(f"found {r.by_user} as reviewer")
                     known_maintainer = True
         if not known_maintainer:
-            self.logger.debug("author: %s, maintainers: %s => need review" % (author, ','.join(maintainers)))
+            self.logger.debug(f"author: {author}, maintainers: {','.join(maintainers)} => need review")
             self.needs_maintainer_review.add(pkgname)
         else:
-            self.logger.warning("%s doesn't have maintainers" % pkgname)
+            self.logger.warning(f"{pkgname} doesn't have maintainers")
             self.needs_maintainer_review.add(pkgname)

     def check_action_maintenance_incident(self, req, a):

@@ -144,7 +144,7 @@ class CheckSource(ReviewBot.ReviewBot):

             return True
         elif (kind is not None and kind != 'source'):
-            self.review_messages['declined'] = 'May not modify a non-source package of type {}'.format(kind)
+            self.review_messages['declined'] = f'May not modify a non-source package of type {kind}'
             return False

         if not self.allow_source_in_sle and self.sle_project_to_check:
@@ -219,7 +219,7 @@ class CheckSource(ReviewBot.ReviewBot):

             req = self.__ensure_add_role_request(source_project)
             if req:
-                declined_msg += ' Created the add_role request %s for addressing this problem.' % req
+                declined_msg += f' Created the add_role request {req} for addressing this problem.'

             self.review_messages['declined'] = declined_msg
             return False
@@ -229,9 +229,9 @@ class CheckSource(ReviewBot.ReviewBot):
             return False

         # Checkout and see if renaming package screws up version parsing.
-        copath = os.path.expanduser('~/co/%s' % self.request.reqid)
+        copath = os.path.expanduser(f'~/co/{self.request.reqid}')
         if os.path.exists(copath):
-            self.logger.warning('directory %s already exists' % copath)
+            self.logger.warning(f'directory {copath} already exists')
             shutil.rmtree(copath)
         os.makedirs(copath)
         os.chdir(copath)
@@ -243,7 +243,7 @@ class CheckSource(ReviewBot.ReviewBot):
             os.rename(target_package, '_old')
         except HTTPError as e:
             if e.code == 404:
-                self.logger.info('target package does not exist %s/%s' % (target_project, target_package))
+                self.logger.info(f'target package does not exist {target_project}/{target_package}')
             else:
                 raise e

@@ -304,21 +304,21 @@ class CheckSource(ReviewBot.ReviewBot):
             known_maintainer = False
             if maintainers:
                 if submitter in maintainers:
-                    self.logger.debug("%s is maintainer" % submitter)
+                    self.logger.debug(f"{submitter} is maintainer")
                     known_maintainer = True
                 if not known_maintainer:
                     for r in self.request.reviews:
                         if r.by_user in maintainers:
-                            self.logger.debug("found %s as reviewer" % r.by_user)
+                            self.logger.debug(f"found {r.by_user} as reviewer")
                             known_maintainer = True
             if not known_maintainer:
-                self.logger.warning("submitter: %s, maintainers: %s => need review" % (submitter, ','.join(maintainers)))
-                self.logger.debug("adding review to %s/%s" % (devel_project, devel_package))
+                self.logger.warning(f"submitter: {submitter}, maintainers: {','.join(maintainers)} => need review")
+                self.logger.debug(f"adding review to {devel_project}/{devel_package}")
                 msg = ('Submission for {} by someone who is not maintainer in '
                        'the devel project ({}). Please review').format(target_package, devel_project)
                 self.add_review(self.request, by_project=devel_project, by_package=devel_package, msg=msg)
         else:
-            self.logger.warning("%s doesn't have devel project" % target_package)
+            self.logger.warning(f"{target_package} doesn't have devel project")

         if self.only_changes():
             self.logger.debug('only .changes modifications')
@@ -338,7 +338,7 @@ class CheckSource(ReviewBot.ReviewBot):

         # Allow any projects already used as devel projects for other packages.
         search = {
-            'package': "@project='%s' and devel/@project='%s'" % (target_project, source_project),
+            'package': f"@project='{target_project}' and devel/@project='{source_project}'",
         }
         result = osc.core.search(self.apiurl, **search)
         return result['package'].attrib['matches'] != '0'
@@ -455,7 +455,7 @@ class CheckSource(ReviewBot.ReviewBot):
         source_project - source project name
         """
         self.logger.info(
-            'Checking required maintainer from the source project (%s)' % self.required_maintainer
+            f'Checking required maintainer from the source project ({self.required_maintainer})'
         )
         if not self.required_maintainer:
             return True
@@ -475,12 +475,12 @@ class CheckSource(ReviewBot.ReviewBot):
             if len(add_roles) > 0:
                 return add_roles[0].reqid
             else:
-                add_role_msg = 'Created automatically from request %s' % self.request.reqid
+                add_role_msg = f'Created automatically from request {self.request.reqid}'
                 return create_add_role_request(self.apiurl, source_project, self.required_maintainer,
                                                'maintainer', message=add_role_msg)
         except HTTPError as e:
             self.logger.error(
-                'Cannot create the corresponding add_role request for %s: %s' % (self.request.reqid, e)
+                f'Cannot create the corresponding add_role request for {self.request.reqid}: {e}'
             )

     def __is_required_maintainer(self, request):
@@ -516,7 +516,7 @@ class CheckSource(ReviewBot.ReviewBot):
         try:
             xml = ET.parse(osc.core.http_GET(url)).getroot()
         except HTTPError as e:
-            self.logger.error('ERROR in URL %s [%s]' % (url, e))
+            self.logger.error(f'ERROR in URL {url} [{e}]')
             return ret

         if xml.find('error') is not None:
@@ -552,11 +552,11 @@ class CheckSource(ReviewBot.ReviewBot):

     def check_action_add_role(self, request, action):
         # Decline add_role request (assumed the bot acting on requests to Factory or similar).
-        message = 'Roles to packages are granted in the devel project, not in %s.' % action.tgt_project
+        message = f'Roles to packages are granted in the devel project, not in {action.tgt_project}.'

         if action.tgt_package is not None:
             project, package = devel_project_fallback(self.apiurl, action.tgt_project, action.tgt_package)
-            message += ' Send this request to {}/{}.'.format(project, package)
+            message += f' Send this request to {project}/{package}.'

         self.review_messages['declined'] = message
         return False
@@ -595,13 +595,13 @@ class CheckSource(ReviewBot.ReviewBot):
         if linked.get('project', action.tgt_project) != action.tgt_project:
             return True
         linked_package = linked.get('package')
-        self.review_messages['declined'] = "Delete the package %s instead" % (linked_package)
+        self.review_messages['declined'] = f"Delete the package {linked_package} instead"
         return False

     def check_action_delete_project(self, request, action):
         # Presumably if the request is valid the bot should be disabled or
         # overridden, but seems like no valid case for allowing this (see #1696).
-        self.review_messages['declined'] = 'Deleting the {} project is not allowed.'.format(action.tgt_project)
+        self.review_messages['declined'] = f'Deleting the {action.tgt_project} project is not allowed.'
         return False

     def check_action_delete_repository(self, request, action):

@@ -36,27 +36,27 @@ class FactorySourceChecker(ReviewBot.ReviewBot):
         if src_srcinfo is None:
             # source package does not exist?
             # handle here to avoid crashing on the next line
-            self.logger.info("Could not get source info for %s/%s@%s" % (src_project, src_package, src_rev))
+            self.logger.info(f"Could not get source info for {src_project}/{src_package}@{src_rev}")
             return False
         projects = self._package_get_upstream_projects(target_package)
         if projects is None:
-            self.logger.error("no upstream project found for {}, can't check".format(target_package))
+            self.logger.error(f"no upstream project found for {target_package}, can't check")
             return False

-        self.review_messages['declined'] = 'the package needs to be accepted in {} first'.format(' or '.join(projects))
+        self.review_messages['declined'] = f"the package needs to be accepted in {' or '.join(projects)} first"
         for project in projects:
-            self.logger.info("Checking in project %s" % project)
+            self.logger.info(f"Checking in project {project}")
             good = self._check_matching_srcmd5(project, target_package, src_srcinfo.verifymd5, self.history_limit)
             if good:
-                self.logger.info("{} is in {}".format(target_package, project))
+                self.logger.info(f"{target_package} is in {project}")
                 return good

             good = self._check_requests(project, target_package, src_srcinfo.verifymd5)
             if good:
-                self.logger.info("{} already reviewed for {}".format(target_package, project))
+                self.logger.info(f"{target_package} already reviewed for {project}")

         if not good:
-            self.logger.info('{} failed source submission check'.format(target_package))
+            self.logger.info(f'{target_package} failed source submission check')

         return good

@@ -167,7 +167,7 @@ by OBS on which this bot relies.

     def isNewPackage(self, tgt_project, tgt_package):
         try:
-            self.logger.debug("package_meta %s %s/%s" % (self.apiurl, tgt_project, tgt_package))
+            self.logger.debug(f"package_meta {self.apiurl} {tgt_project}/{tgt_package}")
             osc.core.show_package_meta(self.apiurl, tgt_project, tgt_package)
         except (HTTPError, URLError):
             return True
@@ -200,7 +200,7 @@ by OBS on which this bot relies.
             self.logger.debug("reject: diff contains no tags")
             return False
         if deleted > 0:
-            self.review_messages['declined'] = '{} issue reference(s) deleted'.format(deleted)
+            self.review_messages['declined'] = f'{deleted} issue reference(s) deleted'
             return False
         return True

@@ -30,7 +30,7 @@ for arg in args:
     elif re.search(r'packages', arg):
         repo.add_susetags(argf, 0, None)
     else:
-        print("%s: unknown repo type" % (arg))
+        print(f"{arg}: unknown repo type")
         sys.exit(1)

 # we only want self-provides
@@ -59,4 +59,4 @@ for p in firstrepo.solvables:
     src = p.lookup_str(solv.SOLVABLE_SOURCENAME)
     if src is None:
         src = "?"
-    print("%s: %s is older than %s from %s" % (src, p, pp, pp.repo))
+    print(f"{src}: {p} is older than {pp} from {pp.repo}")
@ -60,7 +60,7 @@ class CompareList(object):
apiurl = 'https://api.suse.de'
else:
apiurl = self.apiurl
query = "match=state/@name='accepted'+and+(action/target/@project='{}'+and+action/@type='delete')".format(project)
query = f"match=state/@name='accepted'+and+(action/target/@project='{project}'+and+action/@type='delete')"
url = makeurl(apiurl, ['search', 'request'], query)
f = http_GET(url)
root = ET.parse(f).getroot()
@ -77,12 +77,12 @@ class CompareList(object):
return False

for linked in links:
if linked.get('project') == project and linked.get('package').startswith("%s." % package):
if linked.get('project') == project and linked.get('package').startswith(f"{package}."):
return False
return True

def check_diff(self, package, old_prj, new_prj):
logging.debug('checking %s ...' % package)
logging.debug(f'checking {package} ...')
query = {'cmd': 'diff',
'view': 'xml',
'oproject': old_prj,
@ -90,9 +90,9 @@ class CompareList(object):
u = makeurl(self.apiurl, ['source', new_prj, package], query=query)
root = ET.parse(http_POST(u)).getroot()
old_srcmd5 = root.findall('old')[0].get('srcmd5')
logging.debug('%s old srcmd5 %s in %s' % (package, old_srcmd5, old_prj))
logging.debug(f'{package} old srcmd5 {old_srcmd5} in {old_prj}')
new_srcmd5 = root.findall('new')[0].get('srcmd5')
logging.debug('%s new srcmd5 %s in %s' % (package, new_srcmd5, new_prj))
logging.debug(f'{package} new srcmd5 {new_srcmd5} in {new_prj}')
# Compare srcmd5
if old_srcmd5 != new_srcmd5:
# check if it has diff element
@ -104,13 +104,13 @@ class CompareList(object):
def submit_new_package(self, source, target, package, msg=None):
req = osc.core.get_request_list(self.apiurl, target, package, req_state=('new', 'review', 'declined'))
if req:
print("There is a request to %s / %s already, skip!" % (target, package))
print(f"There is a request to {target} / {package} already, skip!")
else:
if not msg:
msg = 'New package submitted by compare_pkglist'
res = osc.core.create_submit_request(self.apiurl, source, package, target, package, message=msg)
if res and res is not None:
print('Created request %s for %s' % (res, package))
print(f'Created request {res} for {package}')
return True
else:
print('Error occurred when creating the submit request')
@ -124,20 +124,20 @@ class CompareList(object):
return
if self.submitfrom and self.submitto:
if not self.item_exists(self.submitfrom):
print("Project %s is not exist" % self.submitfrom)
print(f"Project {self.submitfrom} is not exist")
return
if not self.item_exists(self.submitto):
print("Project %s is not exist" % self.submitto)
print(f"Project {self.submitto} is not exist")
return

# get souce packages from target
print('Gathering the package list from %s' % self.old_prj)
print(f'Gathering the package list from {self.old_prj}')
source = self.get_source_packages(self.old_prj)
print('Gathering the package list from %s' % self.new_prj)
print(f'Gathering the package list from {self.new_prj}')
target = self.get_source_packages(self.new_prj)
removed_packages = self.removed_pkglist(self.old_prj)
if self.existin:
print('Gathering the package list from %s' % self.existin)
print(f'Gathering the package list from {self.existin}')
existin_packages = self.get_source_packages(self.existin)

if not self.removedonly:
@ -162,10 +162,10 @@ class CompareList(object):
continue

if pkg in removed_pkgs_in_target:
print("New package but has removed from {:<8} - {}".format(self.new_prj, pkg))
print(f"New package but has removed from {self.new_prj:<8} - {pkg}")
continue

print("New package than {:<8} - {}".format(self.new_prj, pkg))
print(f"New package than {self.new_prj:<8} - {pkg}")

if self.submit:
if self.submit_limit and submit_counter > int(self.submit_limit):
@ -173,11 +173,11 @@ class CompareList(object):

if self.submitfrom and self.submitto:
if not self.item_exists(self.submitfrom, pkg):
print("%s not found in %s" % (pkg, self.submitfrom))
print(f"{pkg} not found in {self.submitfrom}")
continue
msg = "Automated submission of a package from %s to %s" % (self.submitfrom, self.submitto)
msg = f"Automated submission of a package from {self.submitfrom} to {self.submitto}"
if self.existin:
msg += " that was included in %s" % (self.existin)
msg += f" that was included in {self.existin}"
if self.submit_new_package(self.submitfrom, self.submitto, pkg, msg):
submit_counter += 1
else:
@ -188,13 +188,13 @@ class CompareList(object):
elif not self.newonly:
diff = self.check_diff(pkg, self.old_prj, self.new_prj)
if diff:
print("Different source in {:<8} - {}".format(self.new_prj, pkg))
print(f"Different source in {self.new_prj:<8} - {pkg}")
if self.verbose:
print("=== Diff ===\n{}".format(diff))
print(f"=== Diff ===\n{diff}")

for pkg in removed_packages:
if pkg in target:
print("Deleted package in {:<8} - {}".format(self.old_prj, pkg))
print(f"Deleted package in {self.old_prj:<8} - {pkg}")


def main(args):
@ -214,10 +214,10 @@ if __name__ == '__main__':
parser.add_argument('-d', '--debug', action='store_true',
help='print info useful for debuging')
parser.add_argument('-o', '--old', dest='old_prj', metavar='PROJECT',
help='the old project where to compare (default: %s)' % SLE,
help=f'the old project where to compare (default: {SLE})',
default=SLE)
parser.add_argument('-n', '--new', dest='new_prj', metavar='PROJECT',
help='the new project where to compare (default: %s)' % OPENSUSE,
help=f'the new project where to compare (default: {OPENSUSE})',
default=OPENSUSE)
parser.add_argument('-v', '--verbose', action='store_true',
help='show the diff')

@ -68,11 +68,11 @@ class ContainerCleaner(ToolBase.ToolBase):
if len(bins) > 0:
match = regex_srccontainer.match(buildcontainer)
if not match:
raise Exception("Could not map %s to source container" % buildcontainer)
raise Exception(f"Could not map {buildcontainer} to source container")

srccontainer = match.group(1)
if srccontainer not in srccontainers:
raise Exception("Mapped %s to wrong source container (%s)" % (buildcontainer, srccontainer))
raise Exception(f"Mapped {buildcontainer} to wrong source container ({srccontainer})")

if srccontainer not in srccontainerarchs:
srccontainerarchs[srccontainer] = []

32
deptool.py
32
deptool.py
@ -68,7 +68,7 @@ class DepTool(cmdln.Cmdln):
name = os.path.basename(os.path.splitext(r)[0])
repo = self.pool.add_repo(name)
repo.add_solv(r)
logger.debug("add repo %s" % name)
logger.debug(f"add repo {name}")
else:
try:
if r.endswith('.repo'):
@ -82,7 +82,7 @@ class DepTool(cmdln.Cmdln):
repo.add_solv(solvfile % name)
if parser.has_option(name, 'priority'):
repo.priority = parser.getint(name, 'priority')
logger.debug("add repo %s" % name)
logger.debug(f"add repo {name}")
except Exception as e:
logger.error(e)

@ -130,14 +130,14 @@ class DepTool(cmdln.Cmdln):
sel = self.pool.select(str(lock), solv.Selection.SELECTION_NAME)
if sel.isempty():
# if we can't find it, it probably is not as important
logger.debug('locked package {} not found'.format(lock))
logger.debug(f'locked package {lock} not found')
else:
jobs += sel.jobs(solv.Job.SOLVER_LOCK)

for n in packages:
sel = self.pool.select(str(n), solv.Selection.SELECTION_NAME)
if sel.isempty():
logger.error('package {} not found'.format(n))
logger.error(f'package {n} not found')
jobs += sel.jobs(solv.Job.SOLVER_INSTALL)

solver = self.pool.Solver()
@ -166,13 +166,13 @@ class DepTool(cmdln.Cmdln):
if reason == solv.Solver.SOLVER_REASON_WEAKDEP:
for v in solver.describe_weakdep_decision(s):
reason2, s2, dep = v
print("-> %s %s %s" % (s2.name, REASONS[reason2], dep))
print(f"-> {s2.name} {REASONS[reason2]} {dep}")
else:
print("-> %s %s %s" % (s.name, REASONS[reason], ruleinfo))
print(f"-> {s.name} {REASONS[reason]} {ruleinfo}")

if opts.size:
size = trans.calc_installsizechange()
print("SIZE %s" % (size))
print(f"SIZE {size}")

return True

@ -212,13 +212,13 @@ class DepTool(cmdln.Cmdln):
if sel.isempty():
logger.error("%s not found", n)
for s in sel.solvables():
print('- {}-{}@{}:'.format(s.name, s.evr, s.arch))
print(f'- {s.name}-{s.evr}@{s.arch}:')
for kind in ('RECOMMENDS', 'REQUIRES', 'SUPPLEMENTS', 'ENHANCES', 'PROVIDES', 'SUGGESTS'):
deps = s.lookup_deparray(getattr(solv, 'SOLVABLE_' + kind), 0)
if deps:
print(' {}:'.format(kind))
print(f' {kind}:')
for dep in deps:
print(' - {}'.format(dep))
print(f' - {dep}')

@cmdln.option("-r", "--repo", dest="repo", action="append",
help="repo to use")
@ -234,7 +234,7 @@ class DepTool(cmdln.Cmdln):
for r in relation:
i = self.pool.str2id(r)
for s in self.pool.whatprovides(i):
print('- {}-{}@{}:'.format(s.name, s.evr, s.arch))
print(f'- {s.name}-{s.evr}@{s.arch}:')

@cmdln.option("-r", "--repo", dest="repo", action="append",
help="repo to use")
@ -256,7 +256,7 @@ class DepTool(cmdln.Cmdln):
if name.startswith('pattern-order()'):
# XXX: no function in bindings to do that properly
order = name[name.find('= ') + 2:]
print("{} {}".format(order, s.name))
print(f"{order} {s.name}")

@cmdln.option("--providers", action="store_true",
help="also show other providers")
@ -288,7 +288,7 @@ class DepTool(cmdln.Cmdln):
logger.info('nothing %s %s', kind.lower(), r)
continue
for s in sel.solvables():
print(' {}: {}-{}@{}'.format(r, s.name, s.evr, s.arch))
print(f' {r}: {s.name}-{s.evr}@{s.arch}')
else:
for n in args:
sel = self.pool.select(n, solv.Selection.SELECTION_NAME)
@ -312,7 +312,7 @@ class DepTool(cmdln.Cmdln):
if not kindprinted:
print(kind)
kindprinted = True
print(' {}: {}-{}@{}'.format(p, r.name, r.evr, r.arch))
print(f' {p}: {r.name}-{r.evr}@{r.arch}')

@cmdln.option("-r", "--repo", dest="repo", action="append",
help="repo to use")
@ -340,7 +340,7 @@ class DepTool(cmdln.Cmdln):
if not kindprinted:
print(kind)
kindprinted = True
print(' {}-{}@{}'.format(r.name, r.evr, r.arch))
print(f' {r.name}-{r.evr}@{r.arch}')

@cmdln.option("-r", "--repo", dest="repo", action="append",
help="repo to use")
@ -362,7 +362,7 @@ class DepTool(cmdln.Cmdln):
# pretty stupid, just lookup strings
value = s.lookup_str(sid)
if value:
print('{}: {}'.format(attr[len('SOLVABLE_'):], value))
print(f"{attr[len('SOLVABLE_'):]}: {value}")


if __name__ == "__main__":

@ -56,7 +56,7 @@ def devel_projects_get(apiurl, project):
"""
devel_projects = {}

root = search(apiurl, **{'package': "@project='{}'".format(project)})['package']
root = search(apiurl, **{'package': f"@project='{project}'"})['package']
for devel in root.findall('package/devel[@project]'):
devel_projects[devel.attrib['project']] = True

@ -103,7 +103,7 @@ def maintainer(args):
groups = meta.xpath('group[@role="maintainer"]/@groupid')
intersection = set(groups).intersection(desired)
if len(intersection) != len(desired):
print('{} missing {}'.format(devel_project, ', '.join(desired - intersection)))
print(f"{devel_project} missing {', '.join(desired - intersection)}")


def notify(args):
@ -125,7 +125,7 @@ def notify(args):
maintainer_map.setdefault(userid, set())
maintainer_map[userid].add(devel_package_identifier)

subject = 'Packages you maintain are present in {}'.format(args.project)
subject = f'Packages you maintain are present in {args.project}'
for userid, package_identifiers in maintainer_map.items():
email = entity_email(apiurl, userid)
message = """This is a friendly reminder about your packages in {}.
@ -146,14 +146,14 @@ in charge of the following packages:
- {}""".format(
args.project, '\n- '.join(sorted(package_identifiers)))

log = 'notified {} of {} packages'.format(userid, len(package_identifiers))
log = f'notified {userid} of {len(package_identifiers)} packages'
try:
mail_send(apiurl, args.project, email, subject, message, dry=args.dry)
print(log)
except smtplib.SMTPRecipientsRefused:
print('[FAILED ADDRESS] {} ({})'.format(log, email))
print(f'[FAILED ADDRESS] {log} ({email})')
except smtplib.SMTPException as e:
print('[FAILED SMTP] {} ({})'.format(log, e))
print(f'[FAILED SMTP] {log} ({e})')


def requests(args):
@ -176,7 +176,7 @@ def requests(args):
request.reqid,
'/'.join((action.tgt_project, action.tgt_package)),
'/'.join((action.src_project, action.src_package)),
'({} days old)'.format(age),
f'({age} days old)',
)))

if args.remind:
@ -210,7 +210,7 @@ def reviews(args):
request.reqid,
'/'.join((review.by_project, review.by_package)) if review.by_package else review.by_project,
'/'.join((action.tgt_project, action.tgt_package)),
'({} days old)'.format(age),
f'({age} days old)',
)))

if args.remind:
@ -248,7 +248,7 @@ def remind_comment(apiurl, repeat_age, request_id, project, package=None):
if comment:
delta = datetime.utcnow() - comment['when']
if delta.days < repeat_age:
print(' skipping due to previous reminder from {} days ago'.format(delta.days))
print(f' skipping due to previous reminder from {delta.days} days ago')
return

# Repeat notification so remove old comment.
@ -264,7 +264,7 @@ def remind_comment(apiurl, repeat_age, request_id, project, package=None):
userids = sorted(maintainers_get(apiurl, project, package))
if len(userids):
users = ['@' + userid for userid in userids]
message = '{}: {}'.format(', '.join(users), REMINDER)
message = f"{', '.join(users)}: {REMINDER}"
else:
message = REMINDER
print(' ' + message)

2
dist/ci/smtp/eml-server.py
vendored
2
dist/ci/smtp/eml-server.py
vendored
@ -17,7 +17,7 @@ class EmlServer(SMTPServer):
f = open(filename, 'w')
f.write(data)
f.close
print('%s saved.' % filename)
print(f'{filename} saved.')
self.no += 1


@ -115,7 +115,7 @@ class DockerImagePublisherRegistry(DockerImagePublisher):

def getDockerArch(self, arch):
if arch not in self.MAP_ARCH_RPM_DOCKER:
raise DockerPublishException("Unknown arch %s" % arch)
raise DockerPublishException(f"Unknown arch {arch}")

return self.MAP_ARCH_RPM_DOCKER[arch]

@ -288,7 +288,7 @@ class DockerImageFetcherURL(DockerImageFetcher):
tar_file.write(requests.get(self.url).content)
with tempfile.TemporaryDirectory() as tar_dir:
# Extract the .tar.xz into the dir
subprocess.call("tar -xaf '%s' -C '%s'" % (tar_file.name, tar_dir), shell=True)
subprocess.call(f"tar -xaf '{tar_file.name}' -C '{tar_dir}'", shell=True)
return callback(tar_dir)


@ -354,7 +354,7 @@ class DockerImageFetcherOBS(DockerImageFetcher):
tar_file.write(requests.get(self.newest_release_url + "/" + filename).content)
with tempfile.TemporaryDirectory() as tar_dir:
# Extract the .tar into the dir
subprocess.call("tar -xaf '%s' -C '%s'" % (tar_file.name, tar_dir), shell=True)
subprocess.call(f"tar -xaf '{tar_file.name}' -C '{tar_dir}'", shell=True)
return callback(tar_dir)


@ -412,25 +412,25 @@ def run():
success = True

for distro in args.distros:
print("Handling %s" % distro)
print(f"Handling {distro}")

archs_to_update = {}
fetchers = config[distro]['fetchers']
publisher = config[distro]['publisher']

for arch in fetchers:
print("\tArchitecture %s" % arch)
print(f"\tArchitecture {arch}")
try:
current = fetchers[arch].currentVersion()
print("\t\tAvailable version: %s" % current)
print(f"\t\tAvailable version: {current}")

released = publisher.releasedDockerImageVersion(arch)
print("\t\tReleased version: %s" % released)
print(f"\t\tReleased version: {released}")

if current != released:
archs_to_update[arch] = current
except Exception as e:
print("\t\tException during version fetching: %s" % e)
print(f"\t\tException during version fetching: {e}")

if not archs_to_update:
print("\tNothing to do.")
@ -444,7 +444,7 @@ def run():
need_to_upload = False

for arch, version in archs_to_update.items():
print("\tUpdating %s image to version %s" % (arch, version))
print(f"\tUpdating {arch} image to version {version}")
try:
fetchers[arch].getDockerImage(lambda image_path: publisher.addImage(version=version,
arch=arch,
@ -452,11 +452,11 @@ def run():
need_to_upload = True

except DockerFetchException as dfe:
print("\t\tCould not fetch the image: %s" % dfe)
print(f"\t\tCould not fetch the image: {dfe}")
success = False
continue
except DockerPublishException as dpe:
print("\t\tCould not publish the image: %s" % dpe)
print(f"\t\tCould not publish the image: {dpe}")
success = False
continue

@ -40,7 +40,7 @@ class DockerRegistryClient():
self.username = username
self.password = password
self.repository = repository
self.scopes = ["repository:%s:pull,push,delete" % repository]
self.scopes = [f"repository:{repository}:pull,push,delete"]
self.token = None

class DockerRegistryError(Exception):
@ -63,7 +63,7 @@ class DockerRegistryClient():
bearer_dict[assignment[0]] = assignment[1].strip('"')

scope_param = "&scope=".join([""] + [urllib.parse.quote(scope) for scope in self.scopes])
response = requests.get("%s?service=%s%s" % (bearer_dict['realm'], bearer_dict['service'], scope_param),
response = requests.get(f"{bearer_dict['realm']}?service={bearer_dict['service']}{scope_param}",
auth=(self.username, self.password))
self.token = response.json()['token']

@ -123,7 +123,7 @@ class DockerRegistryClient():
alg.update(content)
reference = "sha256:" + alg.hexdigest()

resp = self.doHttpCall("PUT", "/v2/%s/manifests/%s" % (self.repository, reference),
resp = self.doHttpCall("PUT", f"/v2/{self.repository}/manifests/{reference}",
headers={'Content-Type': content_json['mediaType']},
data=content)

@ -153,7 +153,7 @@ class DockerRegistryClient():
def getManifest(self, reference):
"""Get a (json-parsed) manifest with the given reference (digest or tag).
If the manifest does not exist, return None. For other errors, False."""
resp = self.doHttpCall("GET", "/v2/%s/manifests/%s" % (self.repository, reference),
resp = self.doHttpCall("GET", f"/v2/{self.repository}/manifests/{reference}",
headers={'Accept': "application/vnd.docker.distribution.manifest.list.v2+json,application/vnd.docker.distribution.manifest.v2+json"}) # noqa: E501

if resp.status_code == 404:
@ -167,7 +167,7 @@ class DockerRegistryClient():
def getManifestDigest(self, reference):
"""Return the digest of the manifest with the given reference.
If the manifest doesn't exist or the request fails, it returns False."""
resp = self.doHttpCall("HEAD", "/v2/%s/manifests/%s" % (self.repository, reference),
resp = self.doHttpCall("HEAD", f"/v2/{self.repository}/manifests/{reference}",
headers={'Accept': "application/vnd.docker.distribution.manifest.list.v2+json,application/vnd.docker.distribution.manifest.v2+json"}) # noqa: E501

if resp.status_code != 200:
@ -177,7 +177,7 @@ class DockerRegistryClient():

def deleteManifest(self, digest):
"""Delete the manifest with the given reference."""
resp = self.doHttpCall("DELETE", "/v2/%s/manifests/%s" % (self.repository, digest))
resp = self.doHttpCall("DELETE", f"/v2/{self.repository}/manifests/{digest}")

return resp.status_code == 202

@ -193,7 +193,7 @@ class DockerRegistryClient():
raise Exception("Invalid digest")

# Check whether the blob already exists - don't upload it needlessly.
stat_request = self.doHttpCall("HEAD", "/v2/%s/blobs/%s" % (self.repository, digest))
stat_request = self.doHttpCall("HEAD", f"/v2/{self.repository}/blobs/{digest}")
if stat_request.status_code == 200 or stat_request.status_code == 307:
return True

@ -204,7 +204,7 @@ class DockerRegistryClient():
content = blob.read()

# First request an upload "slot", we get an URL we can PUT to back
upload_request = self.doHttpCall("POST", "/v2/%s/blobs/uploads/" % self.repository)
upload_request = self.doHttpCall("POST", f"/v2/{self.repository}/blobs/uploads/")
if upload_request.status_code == 202:
location = upload_request.headers['Location']
upload = self.doHttpCall("PUT", location + "&digest=" + digest,

@ -81,7 +81,7 @@ if not options.version:
conn.request('HEAD', u.path)
res = conn.getresponse()
if res.status != 302:
raise Exception("http fail: %s %s" % (res.status, res.reason))
raise Exception(f"http fail: {res.status} {res.reason}")

loc = res.getheader('location')
if loc is None:
@ -89,7 +89,7 @@ if not options.version:

m = re.search(r'(?:Snapshot|Build)([\d.]+)-Media', loc)
if m is None:
raise Exception("failed to parse %s" % loc)
raise Exception(f"failed to parse {loc}")

version = m.group(1)
logger.debug("found version %s", version)
@ -117,7 +117,7 @@ conn = http.client.HTTPConnection(u.hostname, 80)
conn.request('GET', u.path)
res = conn.getresponse()
if res.status != 200:
raise Exception("http %s fail: %s %s" % (u, res.status, res.reason))
raise Exception(f"http {u} fail: {res.status} {res.reason}")

txt = res.read().decode('latin1')
if '====' not in txt:
@ -136,7 +136,7 @@ if options.dry:
print("sending ...")
print(msg.as_string())
else:
logger.info("announcing version {}".format(version))
logger.info(f"announcing version {version}")
s = smtplib.SMTP(config['relay'])
s.send_message(msg)
s.quit()

@ -32,7 +32,7 @@ def list(dirpath):
for i in sorted(os.listdir(_dir), reverse=True):
if not digits_re.match(i):
continue
ret = ret + '<a href="diff/%s">%s</a>' % (i, i)
ret = ret + f'<a href="diff/{i}">{i}</a>'
if i == current:
ret = ret + " <--"
ret = ret + '<br/>'

@ -87,9 +87,9 @@ class ChangeLogger(cmdln.Cmdln):
'kernel-vanilla',
'kernel-xen',
):
srpm = '%s-%s-%s.src.rpm' % ('kernel-source', m.group('version'), m.group('release'))
srpm = f"kernel-source-{m.group('version')}-{m.group('release')}.src.rpm"
pkgdata[binrpm]['sourcerpm'] = srpm
print("%s -> %s" % (utf8str(h['sourcerpm']), srpm))
print(f"{utf8str(h['sourcerpm'])} -> {srpm}")

if srpm in changelogs:
changelogs[srpm]['packages'].append(binrpm)
@ -104,7 +104,7 @@ class ChangeLogger(cmdln.Cmdln):
def _walk_through_iso_image(iso, path="/"):
file_stats = iso.readdir(path)
if file_stats is None:
raise Exception("Unable to find directory %s inside the iso image" % path)
raise Exception(f"Unable to find directory {path} inside the iso image")

for stat in file_stats:
filename = stat[0]
@ -128,7 +128,7 @@ class ChangeLogger(cmdln.Cmdln):
fd = os.open(arg, os.O_RDONLY)

if not iso.is_open() or fd is None:
raise Exception("Could not open %s as an ISO-9660 image." % arg)
raise Exception(f"Could not open {arg} as an ISO-9660 image.")

for filename, LSN in _walk_through_iso_image(iso):
os.lseek(fd, LSN * pycdio.ISO_BLOCKSIZE, io.SEEK_SET)
@ -145,7 +145,7 @@ class ChangeLogger(cmdln.Cmdln):
h = self.readRpmHeader(pkg)
_getdata(h)
else:
raise Exception("don't know what to do with %s" % arg)
raise Exception(f"don't know what to do with {arg}")

return pkgdata, changelogs

@ -161,7 +161,7 @@ class ChangeLogger(cmdln.Cmdln):
if not opts.dir:
raise Exception("need --dir option")
if not os.path.isdir(opts.dir):
raise Exception("%s must be a directory" % opts.dir)
raise Exception(f"{opts.dir} must be a directory")
if not opts.snapshot:
raise Exception("missing snapshot option")

@ -207,18 +207,18 @@ class ChangeLogger(cmdln.Cmdln):
if not opts.dir:
raise Exception("need --dir option")
if not os.path.isdir(opts.dir):
raise Exception("%s must be a directory" % opts.dir)
raise Exception(f"{opts.dir} must be a directory")

f = open(os.path.join(opts.dir, version1), 'rb')
(v, (v1pkgs, v1changelogs)) = pickle.load(f,
encoding='utf-8', errors='backslashreplace')
if v != data_version:
raise Exception("not matching version %s in %s" % (v, version1))
raise Exception(f"not matching version {v} in {version1}")
f = open(os.path.join(opts.dir, version2), 'rb')
(v, (v2pkgs, v2changelogs)) = pickle.load(f,
encoding='utf-8', errors='backslashreplace')
if v != data_version:
raise Exception("not matching version %s in %s" % (v, version2))
raise Exception(f"not matching version {v} in {version2}")

p1 = set(v1pkgs.keys())
p2 = set(v2pkgs.keys())
@ -237,7 +237,7 @@ class ChangeLogger(cmdln.Cmdln):
try:
t1 = v1changelogs[srpm1]['changelogtime'][0]
except IndexError:
print("{} doesn't have a changelog".format(srpm1), file=sys.stderr)
print(f"{srpm1} doesn't have a changelog", file=sys.stderr)
continue
m = SRPM_RE.match(srpm)
if m:
@ -245,21 +245,21 @@ class ChangeLogger(cmdln.Cmdln):
else:
name = srpm
if len(v2changelogs[srpm]['changelogtime']) == 0:
print(' {} ERROR: no changelog'.format(name))
print(f' {name} ERROR: no changelog')
continue
if t1 == v2changelogs[srpm]['changelogtime'][0]:
continue # no new changelog entry, probably just rebuilt
pkgs = sorted(group[srpm])
details += "\n==== %s ====\n" % name
details += f"\n==== {name} ====\n"
if v1pkgs[pkgs[0]]['version'] != v2pkgs[pkgs[0]]['version']:
print(" %s (%s -> %s)" % (name, v1pkgs[pkgs[0]]['version'],
v2pkgs[pkgs[0]]['version']))
details += "Version update (%s -> %s)\n" % (v1pkgs[pkgs[0]]['version'],
v2pkgs[pkgs[0]]['version'])
else:
print(" %s" % name)
print(f" {name}")
if len(pkgs) > 1:
details += "Subpackages: %s\n" % " ".join([p for p in pkgs if p != name])
details += f"Subpackages: {' '.join([p for p in pkgs if p != name])}\n"

changedetails = ""
for (i2, t2) in enumerate(v2changelogs[srpm]['changelogtime']):
@ -273,7 +273,7 @@ class ChangeLogger(cmdln.Cmdln):
if len(changedetails_lines) > changelog_max_lines + 5:
changedetails = '\n'.join(changedetails_lines[0:changelog_max_lines])
left = len(changedetails_lines) - changelog_max_lines - 1
changedetails += '\n ... changelog too long, skipping {} lines ...\n'.format(left)
changedetails += f'\n ... changelog too long, skipping {left} lines ...\n'
# add last line of changelog diff so that it's possible to
# find out the end of the changelog section
changedetails += changedetails_lines[-1]

@ -19,7 +19,7 @@ from osclib.core import project_pseudometa_package
OPENSUSE = 'openSUSE:Leap:15.2'
OPENSUSE_PREVERSION = 'openSUSE:Leap:15.1'
OPENSUSE_RELEASED_VERSION = ['openSUSE:Leap:15.0', 'openSUSE:Leap:15.1']
FCC = '{}:FactoryCandidates'.format(OPENSUSE)
FCC = f'{OPENSUSE}:FactoryCandidates'

makeurl = osc.core.makeurl
http_GET = osc.core.http_GET
@ -51,7 +51,7 @@ class FccFreezer(object):
add to the frozenlinks, can be the ignored package.
"""
package = si.get('package')
logging.debug("Processing %s" % (package))
logging.debug(f"Processing {package}")

# If the package is an internal one (e.g _product)
if package.startswith('_') or package.startswith('Test-DVD') or package.startswith('000'):
@ -72,7 +72,7 @@ class FccFreezer(object):
proot = ET.parse(f).getroot()
lsrcmd5 = proot.get('lsrcmd5')
if lsrcmd5 is None:
raise Exception("{}/{} is not a link but we expected one".format(self.factory, package))
raise Exception(f"{self.factory}/{package} is not a link but we expected one")
ET.SubElement(flink, 'package', {'name': package, 'srcmd5': lsrcmd5, 'vrev': si.get('vrev')})
return None

@ -111,7 +111,7 @@ class FccFreezer(object):
if self.debug:
logging.debug("Dump ignored source")
for source in ignored_sources:
logging.debug("Ignored source: %s" % source)
logging.debug(f"Ignored source: {source}")

url = makeurl(self.apiurl, ['source', FCC, '_project', '_frozenlinks'], {'meta': '1'})
link = ET.tostring(flink)
@ -206,7 +206,7 @@ class FccSubmitter(object):

def is_new_package(self, tgt_project, tgt_package):
try:
logging.debug("Gathering package_meta %s/%s" % (tgt_project, tgt_package))
logging.debug(f"Gathering package_meta {tgt_project}/{tgt_package}")
osc.core.show_package_meta(self.apiurl, tgt_project, tgt_package)
except (HTTPError, URLError):
return True
@ -217,7 +217,7 @@ class FccSubmitter(object):
src_project = self.factory # submit from Factory only
dst_project = self.to_prj

msg = 'Automatic request from %s by F-C-C Submitter. Please review this change and decline it if Leap do not need it.' % src_project
msg = f'Automatic request from {src_project} by F-C-C Submitter. Please review this change and decline it if Leap do not need it.'
res = osc.core.create_submit_request(self.apiurl,
src_project,
package,
@ -253,7 +253,7 @@ class FccSubmitter(object):
def is_sle_base_pkgs(self, package):
link = self.get_link(self.to_prj, package)
if link is None or link.get('project') not in self.sle_base_prjs:
logging.debug("%s not from SLE base" % package)
logging.debug(f"{package} not from SLE base")
return False
return True

@ -262,7 +262,7 @@ class FccSubmitter(object):
succeeded_packages = []
succeeded_packages = self.get_build_succeeded_packages(self.from_prj)
if not len(succeeded_packages) > 0:
logging.info('No build succeeded package in %s' % self.from_prj)
logging.info(f'No build succeeded package in {self.from_prj}')
return

print('Build succeeded packages:')
@ -271,7 +271,7 @@ class FccSubmitter(object):
print(pkg)

print('-------------------------------------')
print("Found {} build succeded packages".format(len(succeeded_packages)))
print(f"Found {len(succeeded_packages)} build succeded packages")

def get_deleted_packages(self, project):
query = 'states=accepted&types=delete&project={}&view=collection'
@ -288,7 +288,7 @@ class FccSubmitter(object):
return pkgs

def load_skip_pkgs_list(self, project, package):
url = makeurl(self.apiurl, ['source', project, package, '{}?expand=1'.format('fcc_skip_pkgs')])
url = makeurl(self.apiurl, ['source', project, package, 'fcc_skip_pkgs?expand=1'])
try:
return http_GET(url).read()
except HTTPError:
@ -299,7 +299,7 @@ class FccSubmitter(object):
succeeded_packages = []
succeeded_packages = self.get_build_succeeded_packages(self.from_prj)
if not len(succeeded_packages) > 0:
logging.info('No build succeeded package in %s' % self.from_prj)
logging.info(f'No build succeeded package in {self.from_prj}')
return

# randomize the list
@ -321,22 +321,22 @@ class FccSubmitter(object):
submit_ok = True

if package in deleted_packages:
logging.info('%s has been dropped from %s, ignore it!' % (package, self.to_prj))
logging.info(f'{package} has been dropped from {self.to_prj}, ignore it!')
submit_ok = False

if self.is_sle_base_pkgs(package) is True:
logging.info('%s origin from SLE base, skip for now!' % package)
logging.info(f'{package} origin from SLE base, skip for now!')
submit_ok = False

# make sure it is new package
new_pkg = self.is_new_package(self.to_prj, package)
if new_pkg is not True:
logging.info('%s is not a new package, do not submit.' % package)
logging.info(f'{package} is not a new package, do not submit.')
submit_ok = False

multi_specs = self.check_multiple_specfiles(self.factory, package)
if multi_specs is None:
logging.info('%s does not exist in %s' % (package, 'openSUSE:Factory'))
logging.info(f'{package} does not exist in openSUSE:Factory')
submit_ok = False

if multi_specs:
@ -348,7 +348,7 @@ class FccSubmitter(object):

for spec in multi_specs['specs']:
if spec not in succeeded_packages:
logging.info('%s is sub-pacakge of %s but build failed, skip it!' % (spec, package))
logging.info(f'{spec} is sub-pacakge of {package} but build failed, skip it!')
submit_ok = False

if not submit_ok:
@ -375,7 +375,7 @@ class FccSubmitter(object):
match = True

if match is not True:
logging.info('%s/%s is in the skip list, do not submit.' % (devel_prj, package))
logging.info(f'{devel_prj}/{package} is in the skip list, do not submit.')
continue
else:
pass
@ -388,18 +388,18 @@ class FccSubmitter(object):
match = True

if match is True:
logging.info('%s is in the skip list, do not submit.' % package)
logging.info(f'{package} is in the skip list, do not submit.')
continue
else:
pass

res = self.create_submitrequest(package)
if res and res is not None:
logging.info('Created request %s for %s' % (res, package))
logging.info(f'Created request {res} for {package}')
else:
logging.error('Error occurred when creating submit request')
else:
logging.debug('%s is exist in %s, skip!' % (package, self.to_prj))
logging.debug(f'{package} is exist in {self.to_prj}, skip!')
time.sleep(5)

# dump multi specs packages
@ -417,7 +417,7 @@ def main(args):
osc.conf.config['debug'] = args.debug

if args.freeze:
print('freezing {}'.format(FCC))
print(f'freezing {FCC}')
freezer = FccFreezer()
freezer.freeze()
else:
@ -436,10 +436,10 @@ if __name__ == '__main__':
parser.add_argument('-d', '--debug', action='store_true',
help='print info useful for debuging')
parser.add_argument('-f', '--from', dest='from_prj', metavar='PROJECT',
help='project where to check (default: %s)' % FCC,
help=f'project where to check (default: {FCC})',
default=FCC)
parser.add_argument('-t', '--to', dest='to_prj', metavar='PROJECT',
help='project where to submit the packages (default: %s)' % OPENSUSE,
help=f'project where to submit the packages (default: {OPENSUSE})',
default=OPENSUSE)
parser.add_argument('-r', '--freeze', dest='freeze', action='store_true', help='rebase FCC project')
parser.add_argument('-s', '--list', dest='list_packages', action='store_true', help='list build succeeded packages')

@ -35,9 +35,9 @@ def notify_project(openqa, state):
logger.debug(f'{state} did not change')
return
try:
openqa.openqa_request('PUT', 'obs_rsync/{}/runs?repository={}'.format(project, repository), retries=0)
openqa.openqa_request('PUT', f'obs_rsync/{project}/runs?repository={repository}', retries=0)
except RequestError as e:
logger.info("Got exception on syncing repository: {}".format(e))
logger.info(f"Got exception on syncing repository: {e}")
return
copyfile(old_filename(state), new_filename(state))
subprocess.run(f'cd {args.to} && git add . && git commit -m "Update of {project}/{repository}" && git push', shell=True, check=True)
@ -70,7 +70,7 @@ if __name__ == '__main__':
interesting_repos[f'{project}_-_{repository}'] = 1

openqa = OpenQA_Client(server=args.openqa)
for state in glob.glob('{}/*.yaml'.format(args.repos)):
for state in glob.glob(f'{args.repos}/*.yaml'):
state = basename(state).replace('.yaml', '')
if state not in interesting_repos:
continue

@ -44,7 +44,7 @@ class Project(object):
def map_iso(self, staging_project, iso):
parts = self.replace_string.split('/')
if parts[0] != 's':
raise Exception("{}'s iso_replace_string does not start with s/".format(self.name))
raise Exception(f"{self.name}'s iso_replace_string does not start with s/")
old = parts[1]
new = parts[2]
new = new.replace('$LETTER', self.staging_letter(staging_project))
@ -316,10 +316,10 @@ class Listener(PubSubConsumer):
return [job for job in jobs if self.is_production_job(job)]

def get_step_url(self, testurl, modulename):
failurl = testurl + '/modules/{!s}/fails'.format(quote_plus(modulename))
failurl = testurl + f'/modules/{quote_plus(modulename)!s}/fails'
fails = requests.get(failurl).json()
failed_step = fails.get('first_failed_step', 1)
return "{!s}#step/{!s}/{:d}".format(testurl, modulename, failed_step)
return f"{testurl!s}#step/{modulename!s}/{failed_step:d}"

def test_url(self, job):
url = self.openqa_url + ("/tests/%d" % job['id'])
@ -340,7 +340,7 @@ class Listener(PubSubConsumer):

def on_message(self, unused_channel, method, properties, body):
self.acknowledge_message(method.delivery_tag)
if method.routing_key == '{}.obs.repo.published'.format(amqp_prefix):
if method.routing_key == f'{amqp_prefix}.obs.repo.published':
self.on_published_repo(json.loads(body))
elif re.search(r'.openqa.', method.routing_key):
data = json.loads(body)
@ -351,7 +351,7 @@ class Listener(PubSubConsumer):
elif data.get('HDD_1'):
self.on_openqa_job(data.get('HDD_1'))
else:
self.logger.warning("unknown rabbitmq message {}".format(method.routing_key))
self.logger.warning(f"unknown rabbitmq message {method.routing_key}")


if __name__ == '__main__':

@ -61,10 +61,10 @@ class Listener(PubSubConsumer):
for arch in archs:
repoid = self.check_arch(project, repository, arch)
if not repoid:
self.logger.info('{}/{}/{} not yet done'.format(project, repository, arch))
self.logger.info(f'{project}/{repository}/{arch} not yet done')
return None
ids[arch] = repoid
self.logger.info('All of {}/{} finished'.format(project, repository))
self.logger.info(f'All of {project}/{repository} finished')
return ids

def is_part_of_namespaces(self, project):
@ -76,7 +76,7 @@ class Listener(PubSubConsumer):
# now we are (re-)connected to the bus and need to fetch the
# initial state
for namespace in self.namespaces:
for state in glob.glob('{}*.yaml'.format(namespace)):
for state in glob.glob(f'{namespace}*.yaml'):
state = state.replace('.yaml', '')
# split
project, repository = state.split('_-_')
@ -117,8 +117,8 @@ class Listener(PubSubConsumer):
pathname = project + '_-_' + repository + '.yaml'
with open(pathname, 'w') as f:
for arch in sorted(ids.keys()):
f.write('{}: {}\n'.format(arch, ids[arch]))
self.push_git('Repository update: {}/{}'.format(project, repository))
f.write(f'{arch}: {ids[arch]}\n')
self.push_git(f'Repository update: {project}/{repository}')

def on_message(self, unused_channel, method, properties, body):
self.logger.debug("on_message")
@ -131,11 +131,11 @@ class Listener(PubSubConsumer):
if not self.is_part_of_namespaces(body['project']):
return
self.restart_timer()
self.logger.info('Repo finished event: {}/{}/{}'.format(body['project'], body['repo'], body['arch']))
self.logger.info(f"Repo finished event: {body['project']}/{body['repo']}/{body['arch']}")
self.update_repo(body['project'], body['repo'])
else:
self.logger.warning(
'unknown rabbitmq message {}'.format(method.routing_key))
f'unknown rabbitmq message {method.routing_key}')


if __name__ == '__main__':

@ -33,7 +33,7 @@ if __name__ == '__main__':
root = ET.parse(http_GET(url)).getroot()
if root.get('code') == 'finished':
continue
logger.error('Repository {}/{}/{} is not yet finished'.format(args.project, args.repository, arch))
logger.error(f'Repository {args.project}/{args.repository}/{arch} is not yet finished')
logger.debug(ET.tostring(root).decode('utf-8'))
# scheduling means the scheduler had some reason to double check the repository state.
# this may or may not result in a restart of the build, but if it doesn't, we're in trouble.
@ -54,9 +54,9 @@ if __name__ == '__main__':
if count.get('code') in ['succeeded', 'excluded', 'disabled']:
counts[count.get('code')] = int(count.get('count'))
continue
logger.error('Repository {}/{} has {} packages'.format(args.project, args.repository, count.get('code')))
logger.error(f"Repository {args.project}/{args.repository} has {count.get('code')} packages")
sys.exit(1)

if counts['disabled'] > counts['succeeded']:
logger.error('Repository {}/{} has more disabled packages than succeeded'.format(args.project, args.repository))
logger.error(f'Repository {args.project}/{args.repository} has more disabled packages than succeeded')
sys.exit(1)

@ -64,10 +64,10 @@ def bug_owner(apiurl, package, entity='person'):
url = osc.core.makeurl(apiurl, ('search', 'owner'), query=query)
root = ET.parse(osc.core.http_GET(url)).getroot()

bugowner = root.find('.//{}[@role="bugowner"]'.format(entity))
bugowner = root.find(f'.//{entity}[@role="bugowner"]')
if bugowner is not None:
return entity_email(apiurl, bugowner.get('name'), entity)
maintainer = root.find('.//{}[@role="maintainer"]'.format(entity))
maintainer = root.find(f'.//{entity}[@role="maintainer"]')
if maintainer is not None:
return entity_email(apiurl, maintainer.get('name'), entity)
if entity == 'person':
@ -108,7 +108,7 @@ def bugzilla_init(apiurl):
def prompt_continue(change_count):
allowed = ['y', 'b', 's', 'n', '']
if change_count > 0:
print('File bug for {} issues and continue? [y/b/s/n/?] (y): '.format(change_count), end='')
print(f'File bug for {change_count} issues and continue? [y/b/s/n/?] (y): ', end='')
else:
print('No changes for which to create bug, continue? [y/b/s/n/?] (y): ', end='')

@ -120,7 +120,7 @@ def prompt_continue(change_count):
response = 'y'
return response
else:
print('Invalid response: {}'.format(response))
print(f'Invalid response: {response}')

return prompt_continue(change_count)

@ -128,7 +128,7 @@ def prompt_continue(change_count):
def prompt_interactive(changes, project, package):
with tempfile.NamedTemporaryFile(mode='w', suffix='.yml') as temp:
temp.write(yaml.safe_dump(changes, default_flow_style=False, default_style="'") + '\n')
temp.write('# {}/{}\n'.format(project, package))
temp.write(f'# {project}/{package}\n')
temp.write('# comment or remove lines to whitelist issues')
temp.flush()

@ -161,7 +161,7 @@ def issue_normalize(trackers, tracker, name):
if tracker in trackers:
return trackers[tracker].replace('@@@', name)

print('WARNING: ignoring unknown tracker {} for {}'.format(tracker, name))
print(f'WARNING: ignoring unknown tracker {tracker} for {name}')
return None


@ -227,10 +227,10 @@ def print_stats(db):
reported += 1
else:
whitelisted += 1
print('Packages: {}'.format(len(db)))
print('Bugs: {}'.format(len(set(bug_ids))))
print('Reported: {}'.format(reported))
print('Whitelisted: {}'.format(whitelisted))
print(f'Packages: {len(db)}')
print(f'Bugs: {len(set(bug_ids))}')
print(f'Reported: {reported}')
print(f'Whitelisted: {whitelisted}')


def main(args):
@ -252,14 +252,14 @@ def main(args):
git_repo_url = 'git@github.com:jberry-suse/openSUSE-release-tools-issue-db.git'
git_message = 'Sync issue-diff.py changes.'
db_dir = sync(args.cache_dir, git_repo_url, git_message)
db_file = os.path.join(db_dir, '{}.yml'.format(args.project))
db_file = os.path.join(db_dir, f'{args.project}.yml')

if os.path.exists(db_file):
db = yaml.safe_load(open(db_file).read())
if db is None:
db = {}
else:
print('Loaded db file: {}'.format(db_file))
print(f'Loaded db file: {db_file}')
else:
db = {}

@ -267,7 +267,7 @@ def main(args):
print_stats(db)
return

print('Comparing {} against {}'.format(args.project, args.factory))
print(f'Comparing {args.project} against {args.factory}')

bugzilla_api = bugzilla_init(args.bugzilla_apiurl)
bugzilla_defaults = (args.bugzilla_product, args.bugzilla_component, args.bugzilla_version)
@ -280,9 +280,9 @@ def main(args):
shuffle(list(packages))
for index, package in enumerate(packages, start=1):
if index % 50 == 0:
print('Checked {} of {}'.format(index, len(packages)))
print(f'Checked {index} of {len(packages)}')
if package in db and db[package] == 'whitelist':
print('Skipping package {}'.format(package))
print(f'Skipping package {package}')
continue

issues_project = issues_get(apiurl, args.project, package, trackers, db)
@ -299,7 +299,7 @@ def main(args):
if len(missing_from_factory) == 0:
continue

print('{}: {} missing'.format(package, len(missing_from_factory)))
print(f'{package}: {len(missing_from_factory)} missing')

# Generate summaries for issues missing from factory.
changes = {}
@ -361,12 +361,12 @@ def main(args):
break
except Fault as e:
if 'There is no component named' in e.faultString:
print('Invalid component {}, fallback to default'.format(meta[1]))
print(f'Invalid component {meta[1]}, fallback to default')
meta = (meta[0], bugzilla_defaults[1], meta[2])
elif 'is not a valid username' in e.faultString:
username = e.faultString.split(' ', 3)[2]
cc.remove(username)
print('Removed invalid username {}'.format(username))
print(f'Removed invalid username {username}')
else:
raise e
tries += 1
@ -389,9 +389,9 @@ def main(args):
yaml.safe_dump(db, outfile, default_flow_style=False, default_style="'")

if notified > 0:
print('{}: {} notified in bug {}, {} whitelisted'.format(package, notified, bug_id, whitelisted))
print(f'{package}: {notified} notified in bug {bug_id}, {whitelisted} whitelisted')
else:
print('{}: {} whitelisted'.format(package, whitelisted))
print(f'{package}: {whitelisted} whitelisted')

if response == 'b':
break

@ -46,7 +46,7 @@ class LegalAuto(ReviewBot.ReviewBot):
return http_GET(url)
except HTTPError as e:
if 500 <= e.code <= 599:
self.logger.debug('Retrying {}'.format(url))
self.logger.debug(f'Retrying {url}')
time.sleep(1)
return self.retried_GET(url)
raise e
@ -107,7 +107,7 @@ class LegalAuto(ReviewBot.ReviewBot):
return True
to_review = self.open_reviews.get(self.request_nick(), None)
if to_review:
self.logger.info("Found {}".format(json.dumps(to_review)))
self.logger.info(f"Found {json.dumps(to_review)}")
to_review = to_review or self.create_db_entry(
src_project, src_package, src_rev)
if not to_review:
@ -117,7 +117,7 @@ class LegalAuto(ReviewBot.ReviewBot):
url = osc.core.makeurl(self.legaldb, ['package', str(pack)])
report = REQ.get(url, headers=self.legaldb_headers).json()
if report.get('priority', 0) != self.request_priority():
self.logger.debug('Update priority {}'.format(self.request_priority()))
self.logger.debug(f'Update priority {self.request_priority()}')
url = osc.core.makeurl(
self.legaldb, ['package', str(pack)], {'priority': self.request_priority()})
REQ.patch(url, headers=self.legaldb_headers)
@ -149,7 +149,7 @@ class LegalAuto(ReviewBot.ReviewBot):
self.message = "@{} declined the legal report with the following comment: {}".format(
user, comment)
else:
self.message = "@{} declined the legal report".format(user)
self.message = f"@{user} declined the legal report"
return None
return False
# print url, json.dumps(report)
@ -161,11 +161,11 @@ class LegalAuto(ReviewBot.ReviewBot):
self.message = None
result = super(LegalAuto, self).check_one_request(req)
if result is None and self.message is not None:
self.logger.debug("Result of {}: {}".format(req.reqid, self.message))
self.logger.debug(f"Result of {req.reqid}: {self.message}")
return result

def check_action__default(self, req, a):
self.logger.error("unhandled request type %s" % a.type)
self.logger.error(f"unhandled request type {a.type}")
return True

def prepare_review(self):
@ -199,11 +199,10 @@ class LegalAuto(ReviewBot.ReviewBot):
# overload as we need to get of the bot_request
def _set_review(self, req, state):
if self.dryrun:
self.logger.debug("dry setting %s to %s with %s" %
(req.reqid, state, self.message))
self.logger.debug(f"dry setting {req.reqid} to {state} with {self.message}")
return

self.logger.debug("setting %s to %s" % (req.reqid, state))
self.logger.debug(f"setting {req.reqid} to {state}")
osc.core.change_review_state(apiurl=self.apiurl,
reqid=req.reqid, newstate=state,
by_group=self.review_group,
@ -211,7 +210,7 @@ class LegalAuto(ReviewBot.ReviewBot):
self.delete_from_db(req.reqid)

def update_project(self, project):
yaml_path = os.path.join(CacheManager.directory('legal-auto'), '{}.yaml'.format(project))
yaml_path = os.path.join(CacheManager.directory('legal-auto'), f'{project}.yaml')
try:
with open(yaml_path, 'r') as file:
self.pkg_cache = yaml.load(file, Loader=yaml.SafeLoader)
@ -256,7 +255,7 @@ class LegalAuto(ReviewBot.ReviewBot):
if match and match.group(1) == package:
lpackage = package
if package != lpackage:
self.logger.info("SKIP {}, it links to {}".format(package, lpackage))
self.logger.info(f"SKIP {package}, it links to {lpackage}")
skip = True
break
if skip:
@ -282,7 +281,7 @@ class LegalAuto(ReviewBot.ReviewBot):
if 'saved' not in obj:
return None
legaldb_id = obj['saved']['id']
self.logger.debug("PKG {}/{}[{}]->{} is {}".format(sproject, package, revision, tproject, legaldb_id))
self.logger.debug(f"PKG {sproject}/{package}[{revision}]->{tproject} is {legaldb_id}")
self.pkg_cache[package] = {revision: legaldb_id}
if obj['saved']['state'] == 'obsolete':
url = osc.core.makeurl(self.legaldb, ['packages', 'import', str(legaldb_id)], {

@ -20,11 +20,11 @@ class MaintInstCheck(ReviewBot.ReviewBot):

def repository_check(self, repository_pairs, archs):
project, repository = repository_pairs[0]
self.logger.info('checking {}/{}'.format(project, repository))
self.logger.info(f'checking {project}/{repository}')

if not len(archs):
self.logger.debug(
'{} has no relevant architectures'.format(project))
f'{project} has no relevant architectures')
return

for arch in archs:
@ -36,7 +36,7 @@ class MaintInstCheck(ReviewBot.ReviewBot):
parts = installcheck(directories, arch, [], [])
if len(parts):
self.comment.append(
'## {}/{}\n'.format(repository_pairs[0][1], arch))
f'## {repository_pairs[0][1]}/{arch}\n')
self.comment.extend(parts)

return len(self.comment) == 0
@ -71,7 +71,7 @@ class MaintInstCheck(ReviewBot.ReviewBot):
# targeting multiple projects such as in maintenance workflow in
# which the message should be set by other actions.
self.logger.debug(
'skipping review of action targeting {}'.format(action.tgt_project))
f'skipping review of action targeting {action.tgt_project}')
return True

repository = target_config.get('main-repo')

26
metrics.py
26
metrics.py
@ -75,7 +75,7 @@ def search_paginated_generator(apiurl, queries=None, **kwargs):
while True:
collection = osc.core.search(apiurl, queries, **kwargs)['request']
if not request_count:
print('processing {:,} requests'.format(int(collection.get('matches'))))
print(f"processing {int(collection.get('matches')):,} requests")

for request in collection.findall('request'):
yield request
@ -128,7 +128,7 @@ def ingest_requests(api, project):
}
# TODO Total time spent in backlog (ie factory-staging, but excluding when staged).

staged_first_review = request.xpath('review[contains(@by_project, "{}:Staging:")]'.format(project))
staged_first_review = request.xpath(f'review[contains(@by_project, "{project}:Staging:")]')
if len(staged_first_review):
by_project = staged_first_review[0].get('by_project')
request_tags['type'] = 'adi' if api.is_adi_project(by_project) else 'letter'
@ -143,7 +143,7 @@ def ingest_requests(api, project):
# All letter where whitelisted since no restriction.
request_tags['whitelisted'] = request_tags['type'] == 'letter'

xpath = 'review[contains(@by_project, "{}:Staging:adi:") and @state="accepted"]/'.format(project)
xpath = f'review[contains(@by_project, "{project}:Staging:adi:") and @state="accepted"]/'
xpath += 'history[comment[text() = "ready to accept"]]/@when'
ready_to_accept = request.xpath(xpath)
if len(ready_to_accept):
@ -169,7 +169,7 @@ def ingest_requests(api, project):

# Staging related reviews.
for number, review in enumerate(
request.xpath('review[contains(@by_project, "{}:Staging:")]'.format(project)), start=1):
request.xpath(f'review[contains(@by_project, "{project}:Staging:")]'), start=1):
staged_at = date_parse(review.get('when'))

project_type = 'adi' if api.is_adi_project(review.get('by_project')) else 'letter'
@ -196,7 +196,7 @@ def ingest_requests(api, project):
point('total', {'backlog': 1, 'staged': -1}, unselected_at, {'event': 'unselect'}, True)

# No-staging related reviews.
for review in request.xpath('review[not(contains(@by_project, "{}:Staging:"))]'.format(project)):
for review in request.xpath(f'review[not(contains(@by_project, "{project}:Staging:"))]'):
tags = {
# who_added is non-trivial due to openSUSE/open-build-service#3898.
'state': review.get('state'),
@ -246,9 +246,9 @@ def ingest_requests(api, project):
if priority.text in found:
point('priority', {'count': -1}, final_at, {'level': priority.text}, True)
else:
print('unable to find priority history entry for {} to {}'.format(request.get('id'), priority.text))
print(f"unable to find priority history entry for {request.get('id')} to {priority.text}")

print('finalizing {:,} points'.format(len(points)))
print(f'finalizing {len(points):,} points')
return walk_points(points, project)


@ -340,7 +340,7 @@ def walk_points(points, target):
def ingest_release_schedule(project):
points = []
release_schedule = {}
release_schedule_file = os.path.join(SOURCE_DIR, 'metrics/annotation/{}.yaml'.format(project))
release_schedule_file = os.path.join(SOURCE_DIR, f'metrics/annotation/{project}.yaml')
if project.endswith('Factory'):
# TODO Pending resolution to #1250 regarding deployment.
return 0
@ -350,7 +350,7 @@ def ingest_release_schedule(project):
'grep -oP "' + r'Changes\.\K\d{5,}' + '"'
snapshots = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE).communicate()[0]
for date in snapshots.split():
release_schedule[datetime.strptime(date, '%Y%m%d')] = 'Snapshot {}'.format(date)
release_schedule[datetime.strptime(date, '%Y%m%d')] = f'Snapshot {date}'
elif os.path.isfile(release_schedule_file):
# Load release schedule for non-rolling releases from yaml file.
with open(release_schedule_file, 'r') as stream:
@ -520,13 +520,13 @@ def ingest_dashboard(api):
for filename in filenames:
content = dashboard_at_changed(api, filename, revision)
if content:
map_func = globals()['ingest_dashboard_{}'.format(filename)]
map_func = globals()[f'ingest_dashboard_{filename}']
fields = map_func(content)
if not len(fields):
continue

points.append({
'measurement': 'dashboard_{}'.format(filename),
'measurement': f'dashboard_{filename}',
'fields': fields,
'time': time,
})
@ -548,7 +548,7 @@ def ingest_dashboard(api):
client.write_points(points, 's')
count += len(points)

print('last revision processed: {}'.format(revision if len(index) else 'none'))
print(f"last revision processed: {revision if len(index) else 'none'}")

return count

@ -581,7 +581,7 @@ def main(args):
Config(apiurl, args.project)
api = StagingAPI(apiurl, args.project)

print('dashboard: wrote {:,} points'.format(ingest_dashboard(api)))
print(f'dashboard: wrote {ingest_dashboard(api):,} points')

global who_workaround_swap, who_workaround_miss
who_workaround_swap = who_workaround_miss = 0
@ -9,7 +9,7 @@ BASEURL = 'http://review.tumbleweed.boombatower.com/data/'


def data_load(name):
response = requests.get(urljoin(BASEURL, '{}.yaml'.format(name)))
response = requests.get(urljoin(BASEURL, f'{name}.yaml'))
return yaml.safe_load(response.text)


@ -21,8 +21,8 @@ def data_write(client, measurement, points):
def ingest_data(client, name):
data = data_load(name)

measurement = 'release_{}'.format(name)
map_func = globals()['map_{}'.format(name)]
measurement = f'release_{name}'
map_func = globals()[f'map_{name}']
points = []
for release, details in data.items():
points.append({
@ -32,7 +32,7 @@ def ingest_data(client, name):
})

data_write(client, measurement, points)
print('wrote {} for {}'.format(len(points), name))
print(f'wrote {len(points)} for {name}')


def map_bug(bugs):
@ -105,9 +105,9 @@ class OpenQABot(ReviewBot.ReviewBot):

def is_incident_in_testing(self, incident):
# hard coded for now as we only run this code for SUSE Maintenance workflow
project = 'SUSE:Maintenance:{}'.format(incident)
project = f'SUSE:Maintenance:{incident}'

xpath = "(state/@name='review') and (action/source/@project='{}' and action/@type='maintenance_release')".format(project)
xpath = f"(state/@name='review') and (action/source/@project='{project}' and action/@type='maintenance_release')"
res = osc.core.search(self.apiurl, request=xpath)['request']
# return the one and only (or None)
return res.find('request')
@ -117,7 +117,7 @@ class OpenQABot(ReviewBot.ReviewBot):
get incident numbers from SUSE:Maintenance:Test project
returns dict with openQA var name : string with numbers
"""
self.logger.debug("calculate_incidents: {}".format(pformat(incidents)))
self.logger.debug(f"calculate_incidents: {pformat(incidents)}")
l_incidents = []
for kind, prj in incidents.items():
packages = osc.core.meta_get_packagelist(self.apiurl, prj)
@ -137,13 +137,13 @@ class OpenQABot(ReviewBot.ReviewBot):
src_prjs = {a.src_project for a in req_.actions}
if SUSEUpdate.kgraft_target(self.apiurl, src_prjs.pop()):
self.logger.debug(
"calculate_incidents: Incident is kgraft - {} ".format(incident))
f"calculate_incidents: Incident is kgraft - {incident} ")
continue

incidents.append(incident)

l_incidents.append((kind + '_TEST_ISSUES', ','.join(incidents)))
self.logger.debug("Calculate incidents:{}".format(pformat(l_incidents)))
self.logger.debug(f"Calculate incidents:{pformat(l_incidents)}")
return l_incidents

def jobs_for_target(self, data, build=None):
@ -160,7 +160,7 @@ class OpenQABot(ReviewBot.ReviewBot):
values['build'] = build
else:
values['test'] = data['test']
self.logger.debug("Get jobs: {}".format(pformat(values)))
self.logger.debug(f"Get jobs: {pformat(values)}")
return self.openqa.openqa_request('GET', 'jobs', values)['jobs']

# we don't know the current BUILD and querying all jobs is too expensive
@ -173,7 +173,7 @@ class OpenQABot(ReviewBot.ReviewBot):
try:
repohash = self.calculate_repo_hash(data['repos'], self.incident_repos.get(prj, {}))
except HTTPError as e:
self.logger.debug("REPOHASH not calculated with response {}".format(e))
self.logger.debug(f"REPOHASH not calculated with response {e}")
return

buildnr = None
@ -202,7 +202,7 @@ class OpenQABot(ReviewBot.ReviewBot):
except ValueError:
continue

buildnr = "{!s}-{:d}".format(today, buildnr + 1)
buildnr = f"{today!s}-{buildnr + 1:d}"

s = data['settings']
# now schedule it for real
@ -212,10 +212,10 @@ class OpenQABot(ReviewBot.ReviewBot):
s['BUILD'] = buildnr
s['REPOHASH'] = repohash
s['_OBSOLETE'] = '1'
self.logger.debug("Prepared: {}".format(pformat(s)))
self.logger.debug(f"Prepared: {pformat(s)}")
if not self.dryrun:
try:
self.logger.info("Openqa isos POST {}".format(pformat(s)))
self.logger.info(f"Openqa isos POST {pformat(s)}")
self.openqa.openqa_request('POST', 'isos', data=s, retries=1)
except Exception as e:
self.logger.error(e)
@ -250,7 +250,7 @@ class OpenQABot(ReviewBot.ReviewBot):
if incident_id in value.split(','):
foundissue = True
if not foundissue:
self.logger.info("Repo job {} not for {} - ignoring".format(job['id'], incident_id))
self.logger.info(f"Repo job {job['id']} not for {incident_id} - ignoring")
return jobs, QA_INPROGRESS
# print(foundissue, incident_id, json.dumps(job['settings'], indent=4))

@ -296,14 +296,14 @@ class OpenQABot(ReviewBot.ReviewBot):

@staticmethod
def get_step_url(testurl, modulename):
failurl = testurl + '/modules/{!s}/fails'.format(quote_plus(modulename))
failurl = testurl + f'/modules/{quote_plus(modulename)!s}/fails'
fails = requests.get(failurl).json()
failed_step = fails.get('first_failed_step', 1)
return "[{!s}]({!s}#step/{!s}/{:d})".format(OpenQABot.emd(modulename), testurl, modulename, failed_step)
return f"[{OpenQABot.emd(modulename)!s}]({testurl!s}#step/{modulename!s}/{failed_step:d})"

@staticmethod
def job_test_name(job):
return "{!s}@{!s}".format(OpenQABot.emd(job['settings']['TEST']), OpenQABot.emd(job['settings']['MACHINE']))
return f"{OpenQABot.emd(job['settings']['TEST'])!s}@{OpenQABot.emd(job['settings']['MACHINE'])!s}"

def summarize_one_openqa_job(self, job):
testurl = osc.core.makeurl(self.openqa.baseurl, ['tests', str(job['id'])])
@ -311,7 +311,7 @@ class OpenQABot(ReviewBot.ReviewBot):
rstring = job['result']
if rstring == 'none':
return None
return '\n- [{!s}]({!s}) is {!s}'.format(self.job_test_name(job), testurl, rstring)
return f'\n- [{self.job_test_name(job)!s}]({testurl!s}) is {rstring!s}'

modstrings = []
for module in job['modules']:
@ -320,15 +320,15 @@ class OpenQABot(ReviewBot.ReviewBot):
modstrings.append(self.get_step_url(testurl, module['name']))

if modstrings:
return '\n- [{!s}]({!s}) failed in {!s}'.format(self.job_test_name(job), testurl, ','.join(modstrings))
return f"\n- [{self.job_test_name(job)!s}]({testurl!s}) failed in {','.join(modstrings)!s}"
elif job['result'] == 'failed': # rare case: fail without module fails
return '\n- [{!s}]({!s}) failed'.format(self.job_test_name(job), testurl)
return f'\n- [{self.job_test_name(job)!s}]({testurl!s}) failed'
return ''

def summarize_openqa_jobs(self, jobs):
groups = {}
for job in jobs:
gl = "{!s}@{!s}".format(self.emd(job['group']), self.emd(job['settings']['FLAVOR']))
gl = f"{self.emd(job['group'])!s}@{self.emd(job['settings']['FLAVOR'])!s}"
if gl not in groups:
groupurl = osc.core.makeurl(self.openqa.baseurl, ['tests', 'overview'],
{'version': job['settings']['VERSION'],
@ -337,7 +337,7 @@ class OpenQABot(ReviewBot.ReviewBot):
'distri': job['settings']['DISTRI'],
'build': job['settings']['BUILD'],
})
groups[gl] = {'title': "__Group [{!s}]({!s})__\n".format(gl, groupurl),
groups[gl] = {'title': f"__Group [{gl!s}]({groupurl!s})__\n",
'passed': 0, 'unfinished': 0, 'failed': []}

job_summary = self.summarize_one_openqa_job(job)
@ -360,11 +360,11 @@ class OpenQABot(ReviewBot.ReviewBot):
msg += "\n\n" + groups[group]['title']
infos = []
if groups[group]['passed']:
infos.append("{:d} tests passed".format(groups[group]['passed']))
infos.append(f"{groups[group]['passed']:d} tests passed")
if len(groups[group]['failed']):
infos.append("{:d} tests failed".format(len(groups[group]['failed'])))
infos.append(f"{len(groups[group]['failed']):d} tests failed")
if groups[group]['unfinished']:
infos.append("{:d} unfinished tests".format(groups[group]['unfinished']))
infos.append(f"{groups[group]['unfinished']:d} unfinished tests")
msg += "(" + ', '.join(infos) + ")\n"
for fail in groups[group]['failed']:
msg += fail
@ -382,7 +382,7 @@ class OpenQABot(ReviewBot.ReviewBot):
self.comment_write(state='done', message=msg, request=req, result='accepted')
return True
else:
self.logger.debug("request {} waits for build".format(req.reqid))
self.logger.debug(f"request {req.reqid} waits for build")
elif qa_state == QA_FAILED or qa_state == QA_PASSED:
if qa_state == QA_PASSED:
msg = "openQA tests passed\n"
@ -477,7 +477,7 @@ class OpenQABot(ReviewBot.ReviewBot):
else:
posts += self.check_product_arch(job, product_prefix, pmap, None)

self.logger.debug("Pmap: {} Posts: {}".format(pmap, posts))
self.logger.debug(f"Pmap: {pmap} Posts: {posts}")
return posts

def incident_openqa_jobs(self, s):
@ -533,7 +533,7 @@ class OpenQABot(ReviewBot.ReviewBot):
def check_suse_incidents(self):
self.wait_for_build = set()
for inc in requests.get('https://maintenance.suse.de/api/incident/active/').json():
self.logger.info("Incident number: {}".format(inc))
self.logger.info(f"Incident number: {inc}")

mesh_job = requests.get('https://maintenance.suse.de/api/incident/' + inc).json()

@ -543,18 +543,18 @@ class OpenQABot(ReviewBot.ReviewBot):
self.test_job(mesh_job['base'])

def test_job(self, mesh_job):
self.logger.debug("Called test_job with: {}".format(mesh_job))
self.logger.debug(f"Called test_job with: {mesh_job}")
incident_project = str(mesh_job['project'])
try:
comment_info = self.find_obs_request_comment(project_name=incident_project)
except HTTPError as e:
self.logger.debug("Couldn't load comments - {}".format(e))
self.logger.debug(f"Couldn't load comments - {e}")
return
comment_build = str(comment_info.get('revision', ''))

openqa_posts = []
for prod in self.api_map.keys():
self.logger.debug("{} -- product in apimap".format(prod))
self.logger.debug(f"{prod} -- product in apimap")
openqa_posts += self.check_product(mesh_job, prod)
openqa_jobs = []
for s in openqa_posts:
@ -565,13 +565,13 @@ class OpenQABot(ReviewBot.ReviewBot):
# take the project comment as marker for not posting jobs
if not len(jobs) and comment_build != str(mesh_job['openqa_build']):
if self.dryrun:
self.logger.info('WOULD POST:{}'.format(pformat(json.dumps(s, sort_keys=True))))
self.logger.info(f'WOULD POST:{pformat(json.dumps(s, sort_keys=True))}')
else:
self.logger.info("Posted: {}".format(pformat(json.dumps(s, sort_keys=True))))
self.logger.info(f"Posted: {pformat(json.dumps(s, sort_keys=True))}")
self.openqa.openqa_request('POST', 'isos', data=s, retries=1)
openqa_jobs += self.incident_openqa_jobs(s)
else:
self.logger.info("{} got {}".format(pformat(s), len(jobs)))
self.logger.info(f"{pformat(s)} got {len(jobs)}")
openqa_jobs += jobs

self.openqa_jobs[incident_project] = openqa_jobs
@ -63,6 +63,6 @@ class openSUSEUpdate(Update):
packages = self.packages(src_prj, dst_prj)
settings['INSTALL_PACKAGES'] = ' '.join(packages.keys())
settings['VERIFY_PACKAGE_VERSIONS'] = ' '.join(
['{} {}-{}'.format(p.name, p.version, p.release) for p in packages.values()])
[f'{p.name} {p.version}-{p.release}' for p in packages.values()])

return [settings]

@ -89,5 +89,5 @@ class SUSEUpdate(Update):
return []
settings += self.add_minimal_settings(src_prj, settings[0])
settings += self.add_kernel_settings(settings[0])
self.logger.debug("settings are: {}".format(settings))
self.logger.debug(f"settings are: {settings}")
return settings

@ -21,13 +21,13 @@ class Update(object):
def get_max_revision(self, job):
repo = self.repo_prefix + '/'
repo += self.maintenance_project.replace(':', ':/')
repo += ':/{!s}'.format(job['id'])
repo += f":/{job['id']!s}"
max_revision = 0
for channel in job['channels']:
crepo = repo + '/' + channel.replace(':', '_')
xml = requests.get(crepo + '/repodata/repomd.xml')
if not xml.ok:
self.logger.info("{} skipped .. need wait".format(crepo))
self.logger.info(f"{crepo} skipped .. need wait")
# if one fails, we skip it and wait
return False
root = ET.fromstring(bytes(xml.text, encoding='utf-8'))
@ -45,7 +45,7 @@ class Update(object):
s['BUILD'] = ':' + build
name = self.incident_name(src_prj)
repo = dst_prj.replace(':', '_')
repo = '{!s}/{!s}/{!s}/'.format(self.repo_prefix, src_prj.replace(':', ':/'), repo)
repo = f"{self.repo_prefix!s}/{src_prj.replace(':', ':/')!s}/{repo!s}/"
patch_id = self.patch_id(repo)
if not patch_id and self.opensuse:
# hot fix for openSUSE
@ -86,7 +86,7 @@ class OriginManager(ReviewBot.ReviewBot):
def check_source_submission(self, src_project, src_package, src_rev, tgt_project, tgt_package) -> Optional[bool]:
kind = package_kind(self.apiurl, tgt_project, tgt_package)
if not (kind is None or kind == 'source'):
self.review_messages['accepted'] = 'skipping {} package since not source'.format(kind)
self.review_messages['accepted'] = f'skipping {kind} package since not source'
return True

advance, result = self.config_validate(tgt_project)
@ -172,7 +172,7 @@ class OriginManager(ReviewBot.ReviewBot):

override, who = self.devel_project_simulate_check_command(source_project, target_project)
if override:
return override, 'change_devel command by {}'.format(who)
return override, f'change_devel command by {who}'

return False, None

@ -244,7 +244,7 @@ class OriginManager(ReviewBot.ReviewBot):
def policy_result_comment_add(self, project, package, comments):
message = '\n\n'.join(comments)
if len(self.request.actions) > 1:
message = '## {}/{}\n\n{}'.format(project, package, message)
message = f'## {project}/{package}\n\n{message}'
suffix = '::'.join([project, package])
else:
suffix = None
@ -28,10 +28,10 @@ def do_cycle(self, subcmd, opts, *args):
deps = ET.fromstring(get_dependson(apiurl, opts.project, opts.repository, opts.arch, [pkgname]))

pkg = deps.find('package')
print("\"%s\"" % pkgname)
print(f"\"{pkgname}\"")
for deps in pkg.findall('pkgdep'):
if deps.text in args:
print("\"%s\" -> \"%s\"" % (deps.text, pkgname))
print(f"\"{deps.text}\" -> \"{pkgname}\"")
except HTTPError:
# Ignore packages that do not exist
print("[color=red]")

@ -75,7 +75,7 @@ def do_origin(self, subcmd, opts, *args):
command = args[0]
if command not in ['config', 'cron', 'history', 'list', 'package', 'potentials',
'projects', 'report', 'update']:
raise oscerr.WrongArgs('Unknown command: {}'.format(command))
raise oscerr.WrongArgs(f'Unknown command: {command}')
if command == 'package' and len(args) < 2:
raise oscerr.WrongArgs('A package must be indicated.')

@ -93,9 +93,9 @@ def do_origin(self, subcmd, opts, *args):
raise oscerr.WrongArgs('A project must be indicated.')
config = config_load(apiurl, opts.project)
if not config:
raise oscerr.WrongArgs('OSRT:OriginConfig attribute missing from {}'.format(opts.project))
raise oscerr.WrongArgs(f'OSRT:OriginConfig attribute missing from {opts.project}')

function = 'osrt_origin_{}'.format(command)
function = f'osrt_origin_{command}'
globals()[function](apiurl, opts, *args[1:])


@ -118,12 +118,12 @@ def osrt_origin_cron(apiurl, opts, *args):
if os.path.exists(lookup_path):
# Update the last accessed time to avoid cache manager culling.
os.utime(lookup_path, (time.time(), os.stat(lookup_path).st_mtime))
print('{}<locked> lookup preserved'.format(project))
print(f'{project}<locked> lookup preserved')
continue

# Force update lookup information.
lookup = osrt_origin_lookup(apiurl, project, force_refresh=True, quiet=True)
print('{} lookup updated for {} package(s)'.format(project, len(lookup)))
print(f'{project} lookup updated for {len(lookup)} package(s)')


def osrt_origin_dump(format, data):
@ -133,7 +133,7 @@ def osrt_origin_dump(format, data):
print(yaml.dump(data))
else:
if format != 'plain':
print('unknown format: {}'.format(format), file=sys.stderr)
print(f'unknown format: {format}', file=sys.stderr)
return False
return True

@ -203,7 +203,7 @@ def osrt_origin_lookup(apiurl, project, force_refresh=False, previous=False, qui

if not previous and not quiet:
dt = timedelta(seconds=time.time() - os.stat(lookup_path).st_mtime)
print('# generated {} ago'.format(dt), file=sys.stderr)
print(f'# generated {dt} ago', file=sys.stderr)

return lookup

@ -353,7 +353,7 @@ def osrt_origin_report(apiurl, opts, *args):
print(body)

if opts.mail:
mail_send(apiurl, opts.project, 'release-list', '{} origin report'.format(opts.project),
mail_send(apiurl, opts.project, 'release-list', f'{opts.project} origin report',
body, None, dry=opts.dry)


@ -369,7 +369,7 @@ def osrt_origin_update(apiurl, opts, *packages):
packages = osrt_origin_update_packages(apiurl, opts.project)

for package in packages:
print('checking for updates to {}/{}...'.format(opts.project, package))
print(f'checking for updates to {opts.project}/{package}...')

request_future = origin_update(apiurl, opts.project, package)
if request_future:
@ -52,14 +52,14 @@ def do_pcheck(self, subcmd, opts, project):
continue
elif sinfo.find('linked') is not None:
elm = sinfo.find('linked')
key = '%s/%s' % (elm.get('project'), elm.get('package'))
key = f"{elm.get('project')}/{elm.get('package')}"
pmap.setdefault(key, []).append(pkg)
todo.setdefault(elm.get('project'), []).append(elm.get('package'))
md5s[pkg] = sinfo.get('verifymd5')
for prj, pkgs in todo.items():
sinfos = osc.core.get_project_sourceinfo(apiurl, prj, True, *pkgs)
for pkg, sinfo in sinfos.items():
key = '%s/%s' % (prj, pkg)
key = f'{prj}/{pkg}'
for p in pmap[key]:
vmd5 = md5s.pop(p)
if vmd5 == sinfo.get('verifymd5'):
@ -74,24 +74,24 @@ def do_pcheck(self, subcmd, opts, project):
if opts.message:
message = opts.message
else:
message = "Scripted push from {project}".format(project=project)
message = f"Scripted push from {project}"
api.create(project=project, package=p, target=prj, message=message)

overview = 'Overview of project {}'.format(project)
overview = f'Overview of project {project}'
print()
print(overview)
print('=' * len(overview))
print('Changed & unsubmitted packages: %d' % len(changed))
print(f'Changed & unsubmitted packages: {len(changed)}')
print(', '.join(changed))
print()
print('Changed & submitted packages: %d' % len(changeSRed.keys()))
print(', '.join(['%s(%s)' % (pkg, SR) for pkg, SR in changeSRed.items()]))
print(f'Changed & submitted packages: {len(changeSRed.keys())}')
print(', '.join([f'{pkg}({SR})' for pkg, SR in changeSRed.items()]))
print()
print('Packages without link: %d' % len(md5s.keys()))
print(f'Packages without link: {len(md5s.keys())}')
print(', '.join(md5s.keys()))
print()
print('Packages with errors: %d' % len(errors.keys()))
print('\n'.join(['%s: %s' % (p, err) for p, err in errors.items()]))
print(f'Packages with errors: {len(errors.keys())}')
print('\n'.join([f'{p}: {err}' for p, err in errors.items()]))


class oscapi:
@ -110,7 +110,7 @@ class oscapi:

def create(self, project, package, target, message):
currev = osc.core.get_source_rev(self.apiurl, project, package)['rev']
print("Creating a request from {project}/{package}".format(project=project, package=package))
print(f"Creating a request from {project}/{package}")
query = {'cmd': 'create'}
url = osc.core.makeurl(self.apiurl, ['request'], query=query)
@ -54,14 +54,14 @@ def _full_project_name(self, project):
return project

if project.startswith('Factory'):
return 'openSUSE:%s' % project
return f'openSUSE:{project}'

if project.startswith('SLE') or project.startswith('ALP'):
return 'SUSE:%s' % project
return f'SUSE:{project}'

# If we can't guess, raise a Warning
if (':' not in project):
warnings.warn('%s project not recognized.' % project)
warnings.warn(f'{project} project not recognized.')
return project


@ -360,7 +360,7 @@ def do_staging(self, subcmd, opts, *args):
):
min_args, max_args = 0, 0
else:
raise oscerr.WrongArgs('Unknown command: %s' % cmd)
raise oscerr.WrongArgs(f'Unknown command: {cmd}')
args = clean_args(args)
if len(args) - 1 < min_args:
raise oscerr.WrongArgs('Too few arguments.')
@ -551,7 +551,7 @@ def do_staging(self, subcmd, opts, *args):
return

for group, info in sorted(proposal.items()):
print('Staging {} in {}'.format(group, info['staging']))
print(f"Staging {group} in {info['staging']}")

# SelectCommand expects strings.
request_ids = map(str, info['requests'].keys())

@ -58,7 +58,7 @@ class PubSubConsumer(object):

def still_alive(self):
# output something so gocd doesn't consider it stalled
self.logger.info('Still alive: {}'.format(datetime.now().time()))
self.logger.info(f'Still alive: {datetime.now().time()}')
if self._run_until and time.time() > self._run_until:
self.stop()
else:
@ -61,7 +61,7 @@ class AcceptCommand(object):
if link['project'] in self.api.rings or link['project'] == self.api.project:
print(f"delete {link['project']}/{link['package']}")
delete_package(self.api.apiurl, link['project'], link['package'],
msg="remove link while accepting delete of {}".format(package))
msg=f"remove link while accepting delete of {package}")

def check_request_for_bugowner(self, to_request, package, id):
url = self.api.makeurl(['request', str(id)])
@ -103,7 +103,7 @@ class AcceptCommand(object):
if accept_all_green:
continue
if not force:
print('The project "{}" is not yet acceptable.'.format(project))
print(f'The project "{project}" is not yet acceptable.')
return False

staging_packages[project] = []
@ -137,7 +137,7 @@ class AcceptCommand(object):

for req in other_new:
print(f"Accepting request {req['id']}: {req['package']}")
change_request_state(self.api.apiurl, str(req['id']), 'accepted', message='Accept to %s' % self.api.project)
change_request_state(self.api.apiurl, str(req['id']), 'accepted', message=f'Accept to {self.api.project}')

for project in sorted(staging_packages.keys()):
print(f'waiting for staging project {project} to be accepted')
@ -146,7 +146,7 @@ class AcceptCommand(object):
status = self.api.project_status(project, reload=True)
if status.get('state') == 'empty':
break
print('{} requests still staged - waiting'.format(status.find('staged_requests').get('count')))
print(f"{status.find('staged_requests').get('count')} requests still staged - waiting")
time.sleep(1)

self.api.accept_status_comment(project, staging_packages[project])
@ -185,7 +185,7 @@ class AcceptCommand(object):
clean_list = set(pkglist) - set(self.api.cnocleanup_packages)

for package in clean_list:
print("[cleanup] deleted %s/%s" % (project, package))
print(f"[cleanup] deleted {project}/{package}")
delete_package(self.api.apiurl, project, package, force=True, msg="autocleanup")

return
@ -40,12 +40,12 @@ class AdiCommand:
review.get('request')))
return
for check in info.findall('missing_checks/check'):
print(query_project + ' ' + Fore.MAGENTA + 'missing: {}'.format(check.get('name')))
print(query_project + ' ' + Fore.MAGENTA + f"missing: {check.get('name')}")
return
for check in info.findall('checks/check'):
state = check.find('state').text
if state != 'success':
print(query_project + '{} {} check: {}'.format(Fore.MAGENTA, state, check.get('name')))
print(query_project + f"{Fore.MAGENTA} {state} check: {check.get('name')}")
return

overall_state = info.get('state')
@ -59,7 +59,7 @@ class AdiCommand:

ready = []
for req in info.findall('staged_requests/request'):
ready.append('{}[{}]'.format(Fore.CYAN + req.get('package') + Fore.RESET, req.get('id')))
ready.append(f"{Fore.CYAN + req.get('package') + Fore.RESET}[{req.get('id')}]")
if len(ready):
print(query_project, Fore.GREEN + 'ready:', ', '.join(ready))

@ -98,7 +98,7 @@ class AdiCommand:
request_id = int(request.get('id'))
target = request.find('./action/target')
target_package = target.get('package')
line = '- {} {}{:<30}{}'.format(request_id, Fore.CYAN, target_package, Fore.RESET)
line = f'- {request_id} {Fore.CYAN}{target_package:<30}{Fore.RESET}'

message = self.api.ignore_format(request_id)
if message:
@ -108,7 +108,7 @@ class AdiCommand:
# Auto-superseding request in adi command
stage_info, code = self.api.update_superseded_request(request)
if stage_info:
print(line + ' ({})'.format(SupersedeCommand.CODE_MAP[code]))
print(line + f' ({SupersedeCommand.CODE_MAP[code]})')
continue

# Only create staging projec the first time a non superseded
@ -120,7 +120,7 @@ class AdiCommand:
if not self.api.rq_to_prj(request_id, name):
return False

print(line + Fore.GREEN + ' (staged in {})'.format(name) + Fore.RESET)
print(line + Fore.GREEN + f' (staged in {name})' + Fore.RESET)

def perform(self, packages, move=False, split=False):
"""
@ -15,23 +15,23 @@ class CheckCommand(object):
# Check for superseded requests
for r in project.findall('obsolete_requests/*'):
if r.get('state') == 'superseded':
report.extend(' - Request %s is superseded by %s' % (r.get('id'), r.get('superseded_by')))
report.extend(f" - Request {r.get('id')} is superseded by {r.get('superseded_by')}")

# Untracked requests
for r in project.findall('untracked_requests/*'):
report.extend(' - Request %s is no tracked but is open for the project' % r.get('id'))
report.extend(f" - Request {r.get('id')} is no tracked but is open for the project")

# Status of obsolete requests
for r in project.findall('obsolete_requests/*'):
if r.get('state') == 'superseded':
continue
report.append(' - %s: %s' % (r.get('package'), r.get('state')))
report.append(f" - {r.get('package')}: {r.get('state')}")
if not verbose:
break

# Missing reviews
for r in project.findall('missing_reviews/review'):
report.append(' - %s: Missing reviews: %s' % (r.get('package'), self.api.format_review(r)))
report.append(f" - {r.get('package')}: Missing reviews: {self.api.format_review(r)}")
if not verbose:
break

@ -39,7 +39,7 @@ class CheckCommand(object):
if project.find('building_repositories/repo') is not None:
report.append(' - At least following repositories are still building:')
for r in project.findall('building_repositories/*'):
report.append(' %s/%s: %s' % (r.get('repository'), r.get('arch'), r.get('state')))
report.append(f" {r.get('repository')}/{r.get('arch')}: {r.get('state')}")
if not verbose:
break

@ -47,7 +47,7 @@ class CheckCommand(object):
if project.find('broken_packages/package') is not None:
report.append(' - Following packages are broken:')
for r in project.findall('broken_packages/package'):
report.append(' %s (%s): %s' % (r.get('package'), r.get('repository'), r.get('state')))
report.append(f" {r.get('package')} ({r.get('repository')}): {r.get('state')}")
if not verbose:
break

@ -58,7 +58,7 @@ class CheckCommand(object):
for check in project.findall('checks/*'):
state = check.find('state').text
if state != 'success':
info = " - %s check: %s" % (state, check.get('name'))
info = f" - {state} check: {check.get('name')}"
url = check.find('url')
if url is not None:
info += " " + url.text
@ -66,7 +66,7 @@ class CheckCommand(object):
break

if project.get('state') == 'acceptable':
report.insert(0, ' ++ Acceptable staging project %s' % project.get('name'))
report.insert(0, f" ++ Acceptable staging project {project.get('name')}")
elif project.get('state') != 'empty':
report.insert(0, ' -- %s Project %s still needs attention' % (project.get('state').upper(),
project.get('name')))
@ -35,7 +35,7 @@ class CleanupRings(object):

def perform(self):
for index, ring in enumerate(self.api.rings):
print('# {}'.format(ring))
print(f'# {ring}')
ring_next = self.api.rings[index + 1] if index + 1 < len(self.api.rings) else None
self.check_depinfo_ring(ring, ring_next)

@ -53,31 +53,31 @@ class CleanupRings(object):
links = si.findall('linked')
pkg = si.get('package')
if links is None or len(links) == 0:
print('# {} not a link'.format(pkg))
print(f'# {pkg} not a link')
else:
linked = links[0]
dprj = linked.get('project')
dpkg = linked.get('package')
if dprj != self.api.project:
if not dprj.startswith(self.api.crings):
print("#{} not linking to base {} but {}".format(pkg, self.api.project, dprj))
print(f"#{pkg} not linking to base {self.api.project} but {dprj}")
self.links[pkg] = dpkg
# multi spec package must link to ring
elif len(links) > 1:
mainpkg = links[1].get('package')
mainprj = links[1].get('project')
if mainprj != self.api.project:
print('# FIXME: {} links to {}'.format(pkg, mainprj))
print(f'# FIXME: {pkg} links to {mainprj}')
else:
destring = None
if mainpkg in self.api.ring_packages:
destring = self.api.ring_packages[mainpkg]
if not destring:
print('# {} links to {} but is not in a ring'.format(pkg, mainpkg))
print("osc linkpac {}/{} {}/{}".format(mainprj, mainpkg, prj, mainpkg))
print(f'# {pkg} links to {mainpkg} but is not in a ring')
print(f"osc linkpac {mainprj}/{mainpkg} {prj}/{mainpkg}")
else:
if pkg != 'glibc.i686': # FIXME: ugly exception
print("osc linkpac -f {}/{} {}/{}".format(destring, mainpkg, prj, pkg))
print(f"osc linkpac -f {destring}/{mainpkg} {prj}/{pkg}")
self.links[pkg] = mainpkg

def fill_pkginfo(self, prj, repo, arch):
@ -94,7 +94,7 @@ class CleanupRings(object):
if self.bin2src[subpkg] == name:
# different archs
continue
print('# Binary {} is defined twice: {} {}+{}'.format(subpkg, prj, name, self.bin2src[subpkg]))
print(f'# Binary {subpkg} is defined twice: {prj} {name}+{self.bin2src[subpkg]}')
self.bin2src[subpkg] = name

def repo_state_acceptable(self, project):
@ -103,7 +103,7 @@ class CleanupRings(object):
for repo in root.findall('result'):
repostate = repo.get('state', 'missing')
if repostate not in ['unpublished', 'published'] or repo.get('dirty', 'false') == 'true':
print('Repo {}/{} is in state {}'.format(repo.get('project'), repo.get('repository'), repostate))
print(f"Repo {repo.get('project')}/{repo.get('repository')} is in state {repostate}")
return False
for package in repo.findall('status'):
code = package.get('code')
@ -121,16 +121,16 @@ class CleanupRings(object):
url = makeurl(self.api.apiurl, ['build', project, 'images', arch, dvd, '_buildinfo'])
root = ET.parse(http_GET(url)).getroot()
# Don't delete the image itself
self.pkgdeps[dvd.split(':')[0]] = 'MYdvd{}'.format(self.api.rings.index(project))
self.pkgdeps[dvd.split(':')[0]] = f'MYdvd{self.api.rings.index(project)}'
for bdep in root.findall('bdep'):
if 'name' not in bdep.attrib:
continue
b = bdep.attrib['name']
if b not in self.bin2src:
print("{} not found in bin2src".format(b))
print(f"{b} not found in bin2src")
continue
b = self.bin2src[b]
self.pkgdeps[b] = 'MYdvd{}'.format(self.api.rings.index(project))
self.pkgdeps[b] = f'MYdvd{self.api.rings.index(project)}'

def check_buildconfig(self, project):
url = makeurl(self.api.apiurl, ['build', project, 'standard', '_buildconfig'])
@ -260,6 +260,6 @@ class CleanupRings(object):
if ":" in source:
self.commands.append(f"# Multibuild flavor {source} not needed")
else:
self.commands.append('osc rdelete -m cleanup {} {}'.format(prj, source))
self.commands.append(f'osc rdelete -m cleanup {prj} {source}')
if nextprj:
self.commands.append('osc linkpac {} {} {}'.format(self.api.project, source, nextprj))
self.commands.append(f'osc linkpac {self.api.project} {source} {nextprj}')
@ -173,7 +173,7 @@ class CommentAPI(object):
for key, value in info.items():
infos.append('='.join((str(key), str(value))))

marker = '<!-- {}{} -->'.format(bot, ' ' + ' '.join(infos) if info else '')
marker = f"<!-- {bot}{' ' + ' '.join(infos) if info else ''} -->"
return marker + '\n\n' + comment

def remove_marker(self, comment):
@ -77,7 +77,7 @@ def get_request_list_with_history(
xpath = ''
if 'all' not in req_state:
for state in req_state:
xpath = xpath_join(xpath, 'state/@name=\'%s\'' % state, inner=True)
xpath = xpath_join(xpath, f'state/@name=\'{state}\'', inner=True)
if req_who:
xpath = xpath_join(xpath, '(state/@who=\'%(who)s\' or history/@who=\'%(who)s\')' % {'who': req_who}, op='and')

@ -95,12 +95,12 @@ def get_request_list_with_history(
xpath = xpath_join(xpath, xpath_base % {'kind': kind, 'val': val}, op='and', nexpr_parentheses=True)

if req_type:
xpath = xpath_join(xpath, 'action/@type=\'%s\'' % req_type, op='and')
xpath = xpath_join(xpath, f'action/@type=\'{req_type}\'', op='and')
for i in exclude_target_projects:
xpath = xpath_join(xpath, '(not(action/target/@project=\'%(prj)s\'))' % {'prj': i}, op='and')
xpath = xpath_join(xpath, f'(not(action/target/@project=\'{i}\'))', op='and')

if conf.config['verbose'] > 1:
print('[ %s ]' % xpath)
print(f'[ {xpath} ]')
queries = {}
queries['request'] = {'withfullhistory': '1'}
res = osc_core_search(apiurl, queries=queries, request=xpath)
@ -175,9 +175,9 @@ def project_role_expand(apiurl, project, role='maintainer'):


def meta_role_expand(apiurl, meta, role='maintainer'):
users = meta.xpath('//person[@role="{}"]/@userid'.format(role))
users = meta.xpath(f'//person[@role="{role}"]/@userid')

groups = meta.xpath('//group[@role="{}"]/@groupid'.format(role))
groups = meta.xpath(f'//group[@role="{role}"]/@groupid')
users.extend(groups_members(apiurl, groups))

return users
@ -200,7 +200,7 @@ def package_list(apiurl, project, expand=True):
@memoize(session=True)
def target_archs(apiurl, project, repository='standard'):
meta = ET.fromstringlist(show_project_meta(apiurl, project))
return meta.xpath('repository[@name="{}"]/arch/text()'.format(repository))
return meta.xpath(f'repository[@name="{repository}"]/arch/text()')


@memoize(session=True)
@ -323,7 +323,7 @@ def devel_project_fallback(apiurl, target_project, target_package):
def devel_projects(apiurl, project):
devel_projects = set()

root = search(apiurl, 'package', "@project='{}' and devel/@project!=''".format(project))
root = search(apiurl, 'package', f"@project='{project}' and devel/@project!=''")
for devel_project in root.xpath('package/devel/@project'):
if devel_project != project:
devel_projects.add(devel_project)
@ -345,7 +345,7 @@ def request_age(request):

def project_list_prefix(apiurl, prefix):
"""Get a list of project with the same prefix."""
query = {'match': 'starts-with(@name, "{}")'.format(prefix)}
query = {'match': f'starts-with(@name, "{prefix}")'}
url = makeurl(apiurl, ['search', 'project', 'id'], query)
root = ET.parse(http_GET(url)).getroot()
return root.xpath('project/@name')
@ -401,7 +401,7 @@ def entity_email(apiurl, key, entity_type='person', include_name=False):

realname = root.find('realname')
if include_name and realname is not None:
email = '{} <{}>'.format(realname.text, email)
email = f'{realname.text} <{email}>'

return email

@ -506,8 +506,8 @@ def attribute_value_load(

raise e

xpath_base = './attribute[@namespace="{}" and @name="{}"]'.format(namespace, name)
value = root.xpath('{}/value/text()'.format(xpath_base))
xpath_base = f'./attribute[@namespace="{namespace}" and @name="{name}"]'
value = root.xpath(f'{xpath_base}/value/text()')
if not len(value):
if root.xpath(xpath_base):
# Handle boolean attributes that are present, but have no value.
@ -563,7 +563,7 @@ def repository_path_expand(apiurl: str, project: str, repo: str, visited_repos:
visited_repos = set()
repos = [[project, repo]]
meta = ET.fromstringlist(show_project_meta(apiurl, project))
paths = meta.findall('.//repository[@name="{}"]/path'.format(repo))
paths = meta.findall(f'.//repository[@name="{repo}"]/path')

# The listed paths are taken as-is, except for the last one...
for path in paths[:-1]:
@ -595,7 +595,7 @@ def repository_path_search(apiurl, project, search_project, search_repository):
# Repositories for a single project are in a row so cache parsing.
root = ET.fromstringlist(show_project_meta(apiurl, project))

paths = root.findall('repository[@name="{}"]/path'.format(repository))
paths = root.findall(f'repository[@name="{repository}"]/path')
for path in paths:
if path.get('project') == search_project and path.get('repository') == search_repository:
return repository_top
@ -865,7 +865,7 @@ def package_version(apiurl, project, package):


def project_attribute_list(apiurl, attribute, locked=None):
xpath = 'attribute/@name="{}"'.format(attribute)
xpath = f'attribute/@name="{attribute}"'
root = search(apiurl, 'project', xpath)
for project in root.xpath('project/@name'):
# Locked not exposed via OBS xpath engine.
@ -922,7 +922,7 @@ def project_remote_prefixed(apiurl, apiurl_remote, project):
if remote_apiurl == apiurl_remote:
return remote + ':' + project

raise Exception('remote APIURL interconnect not configured for{}'.format(apiurl_remote))
raise Exception(f'remote APIURL interconnect not configured for{apiurl_remote}')


def review_find_last(request, user, states=['all']):
@ -978,7 +978,7 @@ def issue_tracker_by_url(apiurl: str, tracker_url: str) -> Optional[str]:
if not tracker_url.endswith('/'):
# All trackers are formatted with trailing slash.
tracker_url += '/'
return next(iter(root.xpath('issue-tracker[url[text()="{}"]]'.format(tracker_url)) or []), None)
return next(iter(root.xpath(f'issue-tracker[url[text()="{tracker_url}"]]') or []), None)


def issue_tracker_label_apply(tracker, identifier):
@ -987,7 +987,7 @@ def issue_tracker_label_apply(tracker, identifier):

def request_remote_identifier(apiurl: str, apiurl_remote: str, request_id: str) -> str:
if apiurl_remote == apiurl:
return 'request#{}'.format(request_id)
return f'request#{request_id}'

# The URL differences make this rather convoluted.
tracker = issue_tracker_by_url(apiurl, apiurl_remote.replace('api.', 'build.'))
@ -1085,7 +1085,7 @@ def request_action_list_maintenance_incident(apiurl, project, package, states=['
# included in the search results. Overall, another prime example of design
# done completely and utterly wrong.

package_repository = '{}.{}'.format(package, project.replace(':', '_'))
package_repository = f"{package}.{project.replace(':', '_')}"

# Loop over all maintenance projects and create selectors for the two
# request states for the given project.
@ -1099,7 +1099,7 @@ def request_action_list_maintenance_incident(apiurl, project, package, states=['

xpath_project_package = ''
xpath_project_package = xpath_join(
xpath_project_package, 'action/source/@package="{}"'.format(package))
xpath_project_package, f'action/source/@package="{package}"')
xpath_project_package = xpath_join(
xpath_project_package, 'action/source/@package="{}"'.format(
package_repository), op='or', inner=True)
@ -1117,12 +1117,12 @@ def request_action_list_maintenance_incident(apiurl, project, package, states=['

xpath = xpath_join(xpath, xpath_project, op='or', nexpr_parentheses=True)

xpath = '({})'.format(xpath)
xpath = f'({xpath})'

if 'all' not in states:
xpath_states = ''
for state in states:
xpath_states = xpath_join(xpath_states, 'state/@name="{}"'.format(state), inner=True)
xpath_states = xpath_join(xpath_states, f'state/@name="{state}"', inner=True)
xpath = xpath_join(xpath, xpath_states, op='and', nexpr_parentheses=True)

xpath = xpath_join(xpath, 'action/@type="maintenance_incident"', op='and')
@ -1142,16 +1142,16 @@ def request_action_list_maintenance_incident(apiurl, project, package, states=['


def request_action_list_maintenance_release(apiurl, project, package, states=['new', 'review']):
package_repository = '{}.{}'.format(package, project.replace(':', '_'))
package_repository = f"{package}.{project.replace(':', '_')}"

xpath = 'action/target/@project="{}"'.format(project)
xpath = xpath_join(xpath, 'action/source/@package="{}"'.format(package_repository), op='and', inner=True)
xpath = '({})'.format(xpath)
xpath = f'action/target/@project="{project}"'
xpath = xpath_join(xpath, f'action/source/@package="{package_repository}"', op='and', inner=True)
xpath = f'({xpath})'

if 'all' not in states:
xpath_states = ''
for state in states:
xpath_states = xpath_join(xpath_states, 'state/@name="{}"'.format(state), inner=True)
xpath_states = xpath_join(xpath_states, f'state/@name="{state}"', inner=True)
xpath = xpath_join(xpath, xpath_states, op='and', nexpr_parentheses=True)

xpath = xpath_join(xpath, 'action/@type="maintenance_release"', op='and')
@ -1257,7 +1257,7 @@ def request_create_delete(apiurl, target_project, target_package, message=None):
def create_function():
return create_delete_request(apiurl, target_project, target_package, message)

return RequestFuture('delete {}/{}'.format(target_project, target_package), create_function)
return RequestFuture(f'delete {target_project}/{target_package}', create_function)


def request_create_change_devel(apiurl, source_project, source_package,
@ -1363,7 +1363,7 @@ class RequestFuture:
return None

request_id = self.create_tolerant()
print('{} = {}'.format(self, request_id))
print(f'{self} = {request_id}')
return request_id

def __str__(self):
@ -1383,9 +1383,9 @@ def add_description(request, text=None):

def message_suffix(action, message=None):
if not message:
message = '{} by OSRT tools'.format(action)
message = f'{action} by OSRT tools'

message += ' (host {})'.format(socket.gethostname())
message += f' (host {socket.gethostname()})'
return message

@ -33,7 +33,7 @@ class CpioFile(object):
fields = struct.unpack(fmt, buf[self.off:off])

if fields[0] != "070701":
raise Exception("invalid cpio header %s" % self.c_magic)
raise Exception(f"invalid cpio header {self.c_magic}")

names = ("c_ino", "c_mode", "c_uid", "c_gid",
"c_nlink", "c_mtime", "c_filesize",
@ -42,7 +42,7 @@ class FreezeCommand(object):
self.create_bootstrap_aggregate_file()

def bootstrap_packages(self):
url = self.api.makeurl(['build', '{}:0-Bootstrap'.format(self.api.crings), '_result'])
url = self.api.makeurl(['build', f'{self.api.crings}:0-Bootstrap', '_result'])
f = self.api.retried_GET(url)
root = ET.parse(f).getroot().find('result')
res = list()
@ -58,7 +58,7 @@ class FreezeCommand(object):

root = ET.Element('aggregatelist')
a = ET.SubElement(root, 'aggregate',
{'project': '{}:0-Bootstrap'.format(self.api.crings)})
{'project': f'{self.api.crings}:0-Bootstrap'})

for package in self.bootstrap_packages():
p = ET.SubElement(a, 'package')
@ -112,7 +112,7 @@ class FreezeCommand(object):
if self.api.is_adi_project(prj):
src_prj = self.api.find_devel_project_from_adi_frozenlinks(self.prj)
if src_prj is None:
raise Exception("{} does not have a valid frozenlinks".format(self.prj))
raise Exception(f"{self.prj} does not have a valid frozenlinks")
else:
self.api.update_adi_frozenlinks(self.prj, src_prj)
return
@ -150,7 +150,7 @@ class FreezeCommand(object):
root = ET.Element('project', {'name': self.prj})
ET.SubElement(root, 'title')
ET.SubElement(root, 'description')
links = self.projectlinks or ['{}:1-MinimalX'.format(self.api.crings)]
links = self.projectlinks or [f'{self.api.crings}:1-MinimalX']
for lprj in links:
ET.SubElement(root, 'link', {'project': lprj})
@ -22,7 +22,7 @@ def describe(directory=None):
def clone(url, directory):
return_code = subprocess.call(['git', 'clone', url, directory])
if return_code != 0:
raise Exception('Failed to clone {}'.format(url))
raise Exception(f'Failed to clone {url}')


def sync(cache_dir, repo_url, message=None):
@ -54,7 +54,7 @@ def sync(cache_dir, repo_url, message=None):
os.chdir(repo_dir)
return_code = subprocess.call([git_sync_exec])
if return_code != 0:
raise Exception('failed to sync {}'.format(repo_name))
raise Exception(f'failed to sync {repo_name}')

os.chdir(cwd)
@ -15,7 +15,7 @@ class IgnoreCommand(object):
"""

for request_id in RequestFinder.find_sr(requests, self.api):
print('{}: ignored'.format(request_id))
print(f'{request_id}: ignored')
comment = message if message else self.MESSAGE
self.api.add_ignored_request(request_id, comment)
self.comment.add_comment(request_id=str(request_id), comment=comment)
@ -52,7 +52,7 @@ class ListCommand:
if not hide_source and action.find('source') is not None:
source_project = action.find('source').get('project')
source_project = self.project_strip(source_project)
line += ' ({})'.format(Fore.YELLOW + source_project + Fore.RESET)
line += f' ({Fore.YELLOW + source_project + Fore.RESET})'
if action.get('type') == 'delete':
line += ' (' + Fore.RED + 'delete request' + Fore.RESET + ')'

@ -72,10 +72,10 @@ class ListCommand:
splitter.stageable = False
for request_type in ('change_devel', 'set_bugowner'):
splitter.reset()
splitter.filter_add('./action[@type="{}"]'.format(request_type))
splitter.filter_add(f'./action[@type="{request_type}"]')
requests = splitter.filter_only()
if len(requests):
print('\n{} request(s)'.format(request_type))
print(f'\n{request_type} request(s)')
for request in sorted(requests, key=lambda s: s.get('id')):
print(' {} {}'.format(
self.api.makeurl(['request', 'show', request.get('id')]),
@ -131,7 +131,7 @@ def memoize(ttl=None, session=False, add_invalidate=False):
cache.clear()

def _add_invalidate_method(_self):
name = '_invalidate_%s' % fn.__name__
name = f'_invalidate_{fn.__name__}'
if not hasattr(_self, name):
setattr(_self, name, _invalidate)
@ -31,9 +31,9 @@ class OBSLock(object):
"""Create a signature with a timestamp."""
reason = str(self.reason)
if self.reason_sub:
reason += ' ({})'.format(self.reason_sub)
reason += f' ({self.reason_sub})'
reason = reason.replace('@', 'at').replace('#', 'hash')
return '%s#%s@%s' % (self.user, reason, datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%f'))
return f"{self.user}#{reason}@{datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%f')}"

def _parse(self, signature):
"""Parse a signature into an user and a timestamp."""
@ -50,7 +50,7 @@ class OBSLock(object):
return user, reason, reason_sub, ts

def _read(self):
url = makeurl(self.apiurl, ['source', self.lock, '_attribute', '%s:LockedBy' % self.ns])
url = makeurl(self.apiurl, ['source', self.lock, '_attribute', f'{self.ns}:LockedBy'])
try:
root = ET.parse(http_GET(url)).getroot()
except HTTPError as e:
@ -66,12 +66,12 @@ class OBSLock(object):

def _write(self, signature):
url = makeurl(self.apiurl, ['source', self.lock, '_attribute'])
data = """
data = f"""
<attributes>
<attribute namespace='%s' name='LockedBy'>
<value>%s</value>
<attribute namespace='{self.ns}' name='LockedBy'>
<value>{signature}</value>
</attribute>
</attributes>""" % (self.ns, signature)
</attributes>"""
http_POST(url, data=data)

def acquire(self):
@ -88,7 +88,7 @@ class OBSLock(object):
if user and ts:
now = datetime.utcnow()
if now < ts:
raise Exception('Lock acquired from the future [%s] by [%s]. Try later.' % (ts, user))
raise Exception(f'Lock acquired from the future [{ts}] by [{user}]. Try later.')
delta = now - ts
if delta.total_seconds() < self.ttl:
# Existing lock that has not expired.
@ -103,14 +103,14 @@ class OBSLock(object):
stop = False

if stop:
print('Lock acquired by [%s] %s ago, reason <%s>. Try later.' % (user, delta, reason))
print(f'Lock acquired by [{user}] {delta} ago, reason <{reason}>. Try later.')
exit(-1)
self._write(self._signature())

time.sleep(1)
user, _, _, _ = self._parse(self._read())
if user != self.user:
raise Exception('Race condition, [%s] wins. Try later.' % user)
raise Exception(f'Race condition, [{user}] wins. Try later.')
self.locked = True

return self
@ -208,7 +208,7 @@ def config_resolve_variables(config, config_project):
|
||||
|
||||
|
||||
def config_resolve_variable(value, config_project, key='config'):
|
||||
prefix = '<{}:'.format(key)
|
||||
prefix = f'<{key}:'
|
||||
end = value.rfind('>')
|
||||
if not value.startswith(prefix) or end == -1:
|
||||
return value
|
||||
@ -488,7 +488,7 @@ def policy_get_preprocess(apiurl, origin, policy):
|
||||
project = origin.rstrip('~')
|
||||
config_project = Config.get(apiurl, project)
|
||||
for suffix in ('', '_update'):
|
||||
key = 'pending_submission_allowed_reviews{}'.format(suffix)
|
||||
key = f'pending_submission_allowed_reviews{suffix}'
|
||||
policy[key] = list(filter(None, [
|
||||
config_resolve_variable(v, config_project, 'config_source')
|
||||
for v in policy[key]]))
|
||||
@ -573,7 +573,7 @@ def policy_input_evaluate(policy, inputs) -> PolicyResult:
|
||||
result.reviews['fallback'] = 'Changing to a higher priority origin, but from another family.'
|
||||
elif inputs['direction'] != 'forward':
|
||||
result.reviews['fallback'] = \
|
||||
'Changing to a higher priority origin, but {} direction.'.format(inputs['direction'])
|
||||
f"Changing to a higher priority origin, but {inputs['direction']} direction."
|
||||
else:
|
||||
result.reviews['fallback'] = 'Changing to a lower priority origin.'
|
||||
else:
|
||||
@ -584,7 +584,7 @@ def policy_input_evaluate(policy, inputs) -> PolicyResult:
|
||||
if not policy['automatic_updates']:
|
||||
result.reviews['fallback'] = 'Forward direction, but automatic updates not allowed.'
|
||||
else:
|
||||
result.reviews['fallback'] = '{} direction.'.format(inputs['direction'])
|
||||
result.reviews['fallback'] = f"{inputs['direction']} direction."
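
Aside: several conversions in this commit, like the two above, interpolate a single-quoted dict subscript, so the f-string itself must switch to double quotes; reusing the same quote character inside the replacement field is only allowed from Python 3.12 (PEP 701) onward. A small sketch with sample data:

    inputs = {'direction': 'backward'}  # sample input dict
    # Outer double quotes let the subscript keep its single quotes
    # (required before Python 3.12).
    print(f"Changing to a higher priority origin, but {inputs['direction']} direction.")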

if inputs['pending_submission'] is not False:
reviews_not_allowed = policy_input_evaluate_reviews_not_allowed(policy, inputs)

@ -822,7 +822,7 @@ def origin_update_pending(apiurl, origin_project, package, target_project, polic
continue

identifier = request_remote_identifier(apiurl, apiurl_remote, request.reqid)
message = 'Newer pending source available from package origin. See {}.'.format(identifier)
message = f'Newer pending source available from package origin. See {identifier}.'
src_project = project_remote_prefixed(apiurl, apiurl_remote, action.src_project)
return request_create_submit(apiurl, src_project, action.src_package,
target_project, package, message=message, revision=action.src_rev,

@ -834,7 +834,7 @@ def origin_update_pending(apiurl, origin_project, package, target_project, polic
def origin_update_mode(apiurl, target_project, package, policy, origin_project):
values = {}
for key in ('skip', 'supersede', 'delay', 'frequency'):
attribute = 'OriginUpdate{}'.format(key.capitalize())
attribute = f'OriginUpdate{key.capitalize()}'
for project in (origin_project, target_project):
for package_attribute in (package, None):
value = attribute_value_load(apiurl, project, attribute, package=package_attribute)

@ -13,7 +13,7 @@ class PrioCommand(object):
:param project: project to check

"""
message = 'raising priority for %s' % status.get('name')
message = f"raising priority for {status.get('name')}"
for r in status.findall('missing_reviews/review'):
reqid = r.get('request')
req = osc.core.get_request(self.api.apiurl, reqid)

@ -16,10 +16,10 @@ class RepairCommand(object):
req = get_request(self.api.apiurl, reqid)

if not req:
raise oscerr.WrongArgs('Request {} not found'.format(reqid))
raise oscerr.WrongArgs(f'Request {reqid} not found')

if req.state.name != 'review':
print('Request "{}" is not in review state'.format(reqid))
print(f'Request "{reqid}" is not in review state')
return

reviews = [r.by_project for r in req.reviews if ':Staging:' in str(r.by_project) and r.state == 'new']

@ -27,9 +27,9 @@ class RepairCommand(object):
if reviews:
if len(reviews) > 1:
raise oscerr.WrongArgs(
'Request {} had multiple review opened by different staging project'.format(reqid))
f'Request {reqid} had multiple review opened by different staging project')
else:
raise oscerr.WrongArgs('Request {} is not for staging project'.format(reqid))
raise oscerr.WrongArgs(f'Request {reqid} is not for staging project')

staging_project = reviews[0]
try:

@ -42,15 +42,15 @@ class RepairCommand(object):
if data is not None:
for request in data.findall('staged_requests/requests'):
if request.get('id') == reqid:
print('Request "{}" had the good setup in "{}"'.format(reqid, staging_project))
print(f'Request "{reqid}" had the good setup in "{staging_project}"')
return
else:
# this situation should only happen on adi staging
print('Project is not exist, re-creating "{}"'.format(staging_project))
print(f'Project is not exist, re-creating "{staging_project}"')
self.api.create_adi_project(staging_project)

# a bad request setup found
print('Repairing "{}"'.format(reqid))
print(f'Repairing "{reqid}"')
change_review_state(self.api.apiurl, reqid, newstate='accepted',
message='Re-evaluation needed', by_project=staging_project)
self.api.add_review(reqid, by_group=self.api.cstaging_group, msg='Requesting new staging review')

@ -64,7 +64,7 @@ class RepairCommand(object):
if cleanup:
untracked = self.api.project_status_requests('untracked')
if len(untracked) > 0:
print('Cleanup {} untracked requests'.format(len(untracked)))
print(f'Cleanup {len(untracked)} untracked requests')
packages += tuple(untracked)

for reqid in RequestFinder.find_sr(packages, self.api):

@ -29,7 +29,7 @@ class CorruptRepos(Exception):

def _format_pkg(sp):
return "{}-{}-{}.{}".format(sp[0], sp[1], sp[2], sp[3])
return f"{sp[0]}-{sp[1]}-{sp[2]}.{sp[3]}"

def _check_exists_in_whitelist(sp, whitelist):

@ -37,7 +37,7 @@ def _check_exists_in_whitelist(sp, whitelist):
logger.debug("Found %s in whitelist, ignoring", sp[0])
return True
# check with version
long_name = "{}-{}".format(sp[0], sp[1])
long_name = f"{sp[0]}-{sp[1]}"
if long_name in whitelist:
logger.debug("Found %s in whitelist, ignoring", long_name)
return True

@ -48,7 +48,7 @@ def _check_exists_in_whitelist(sp, whitelist):

def _check_colon_format(sp1, sp2, whitelist):
if "{}:{}".format(sp1, sp2) in whitelist:
if f"{sp1}:{sp2}" in whitelist:
logger.debug("Found %s:%s in whitelist, ignoring", sp1, sp2)
return True

@ -114,9 +114,9 @@ def _fileconflicts(pfile, arch, target_packages, whitelist):
logger.debug("Packages %s and %s with conflicting files conflict", pkgcanon1, pkgcanon2)
continue

output += "found conflict of {} with {}\n".format(_format_pkg(sp1), _format_pkg(sp2))
output += f"found conflict of {_format_pkg(sp1)} with {_format_pkg(sp2)}\n"
for file in conflict['conflicts'].split('\n'):
output += " {}\n".format(file)
output += f" {file}\n"
output += "\n"

if len(output):

@ -162,7 +162,7 @@ def parsed_installcheck(repos, arch, target_packages, whitelist):
if package not in target_packages:
continue
if package in whitelist:
logger.debug("{} fails installcheck but is white listed".format(package))
logger.debug(f"{package} fails installcheck but is white listed")
continue
reported_problems[package] = {'problem': match.group(
1) + match.group(2), 'output': [], 'source': target_packages[package]}

@ -212,7 +212,7 @@ def installcheck(directories, arch, whitelist, ignore_conflicts):

def mirrorRepomd(cachedir, url):
# Use repomd.xml to get the location of primary.xml.*
repoindex = ET.fromstring(requests.get('{}/repodata/repomd.xml'.format(url)).content)
repoindex = ET.fromstring(requests.get(f'{url}/repodata/repomd.xml').content)
primarypath = repoindex.xpath("string(./repo:data[@type='primary']/repo:location/@href)",
namespaces={'repo': 'http://linux.duke.edu/metadata/repo'})

@ -239,18 +239,18 @@ def mirror(apiurl, project, repository, arch):
os.makedirs(directory)

meta = ET.parse(http_GET(makeurl(apiurl, ['source', project, '_meta']))).getroot()
repotag = meta.xpath("/project/repository[@name='{}']".format(repository))[0]
repotag = meta.xpath(f"/project/repository[@name='{repository}']")[0]
if arch not in repotag.xpath("./arch/text()"):
# Arch not in this project, skip mirroring
return directory

download = repotag.xpath("./download[@arch='{}']".format(arch))
download = repotag.xpath(f"./download[@arch='{arch}']")
if download is not None and len(download) > 0:
if len(download) > 1:
raise Exception('Multiple download urls unsupported')
repotype = download[0].get('repotype')
if repotype != 'rpmmd':
raise Exception('repotype {} not supported'.format(repotype))
raise Exception(f'repotype {repotype} not supported')
return mirrorRepomd(directory, download[0].get('url'))

rm = RepoMirror(apiurl)
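
Aside: when an f-string assembles an XPath predicate, as in the hunks above, the quotes inside [@name='...'] are XPath syntax and never conflict with the enclosing Python quotes. A minimal sketch using ElementTree and sample XML:

    import xml.etree.ElementTree as ET

    meta = ET.fromstring('<project><repository name="standard"/></project>')  # sample meta
    repository = 'standard'
    # The f-string only builds the predicate text; the quoting inside [...]
    # belongs to XPath, not Python.
    assert meta.find(f"repository[@name='{repository}']") is not None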

@ -109,7 +109,7 @@ class RequestFinder(object):
:param newcand: the review state of staging-group must be new
"""

query = 'types=submit,delete&states=new,review&project={}&view=collection'.format(self.api.project)
query = f'types=submit,delete&states=new,review&project={self.api.project}&view=collection'
url = makeurl(self.api.apiurl, ['request'], query)
f = http_GET(url)
root = ET.parse(f).getroot()

@ -149,7 +149,7 @@ class RequestFinder(object):
continue
if consider_stagings and self.find_staging_project(p):
continue
raise oscerr.WrongArgs('No SR# found for: {}'.format(p))
raise oscerr.WrongArgs(f'No SR# found for: {p}')

def find_via_stagingapi(self, pkgs):
"""

@ -173,7 +173,7 @@ class RequestFinder(object):
found = True
break
if not found:
raise oscerr.WrongArgs('No SR# found for: {}'.format(p))
raise oscerr.WrongArgs(f'No SR# found for: {p}')

def find_staging_project(self, project):
"""

@ -47,7 +47,7 @@ class RequestSplitter(object):
def strategy_set(self, name, **kwargs):
self.reset()

class_name = 'Strategy{}'.format(name.lower().title())
class_name = f'Strategy{name.lower().title()}'
cls = globals()[class_name]
self.strategy = cls(**kwargs)
self.strategy.apply(self)

@ -276,7 +276,7 @@ class RequestSplitter(object):

def requests_assign(self, group, staging, merge=False):
# Arbitrary, but descriptive group key for proposal.
key = '{}#{}@{}'.format(len(self.proposal), self.strategy.key, group)
key = f'{len(self.proposal)}#{self.strategy.key}@{group}'
self.proposal[key] = {
'bootstrap_required': self.grouped[group]['bootstrap_required'],
'group': group,

@ -67,7 +67,7 @@ class SelectCommand(object):

if request not in staged_requests and not supersede:
# Normal 'select' command
print('Adding request "{}" to project "{}"'.format(request, self.target_project))
print(f'Adding request "{request}" to project "{self.target_project}"')

return self.api.rq_to_prj(request, self.target_project, remove_exclusion)
elif request in staged_requests and (move or supersede):

@ -75,17 +75,17 @@ class SelectCommand(object):
# supersede = (new_rq, package, project)
fprj = self.api.packages_staged[staged_requests[request]]['prj'] if not supersede else supersede[2]
if filter_from and filter_from != fprj:
print('Ignoring "{}" in "{}" since not in "{}"'.format(request, fprj, filter_from))
print(f'Ignoring "{request}" in "{fprj}" since not in "{filter_from}"')
return True

if supersede:
print('"{} ({}) is superseded by {}'.format(request, supersede[1], supersede[0]))
print(f'"{request} ({supersede[1]}) is superseded by {supersede[0]}')

if fprj == self.target_project:
print('"{}" is currently in "{}"'.format(request, self.target_project))
print(f'"{request}" is currently in "{self.target_project}"')
return False

print('Moving "{}" from "{}" to "{}"'.format(request, fprj, self.target_project))
print(f'Moving "{request}" from "{fprj}" to "{self.target_project}"')

# Store the source project, we also need to write a comment there
self.affected_projects.add(fprj)

@ -102,7 +102,7 @@ class SelectCommand(object):
print(msg)
return True
elif supersede:
print('"{} ({}) supersedes {}'.format(request, supersede[1], supersede[0]))
print(f'"{request} ({supersede[1]}) supersedes {supersede[0]}')
else:
raise oscerr.WrongArgs('Arguments for select are not correct.')

@ -132,7 +132,7 @@ class SelectCommand(object):
requests = RequestFinder.find_sr(requests, self.api, newcand, consider_stagings=move)
requests_count = len(requests)
for index, request in enumerate(requests, start=1):
print('({}/{}) '.format(index, requests_count), end='')
print(f'({index}/{requests_count}) ', end='')
if not self.select_request(request, move, filter_from, remove_exclusion=remove_exclusion):
return False

@ -97,8 +97,8 @@ class StagingAPI(object):
# If the project support rings, inititialize some variables.
if self.crings:
self._rings = (
'{}:0-Bootstrap'.format(self.crings),
'{}:1-MinimalX'.format(self.crings)
f'{self.crings}:0-Bootstrap',
f'{self.crings}:1-MinimalX'
)
else:
self._rings = []

@ -167,9 +167,9 @@ class StagingAPI(object):
return func(url)
except HTTPError as e:
if 500 <= e.code <= 599:
print('Error {}, retrying {} in {}s'.format(e.code, url, retry_sleep_seconds))
print(f'Error {e.code}, retrying {url} in {retry_sleep_seconds}s')
elif e.code == 400 and e.reason == 'service in progress':
print('Service in progress, retrying {} in {}s'.format(url, retry_sleep_seconds))
print(f'Service in progress, retrying {url} in {retry_sleep_seconds}s')
else:
raise e
time.sleep(retry_sleep_seconds)

@ -327,7 +327,7 @@ class StagingAPI(object):
def prj_from_short(self, name):
if name.startswith(self.cstaging):
return name
return '{}:{}'.format(self.cstaging, name)
return f'{self.cstaging}:{name}'

def get_staging_projects_short(self, adi=False):
"""

@ -381,7 +381,7 @@ class StagingAPI(object):

req = get_request(self.apiurl, str(request_id))
if not req:
raise oscerr.WrongArgs('Request {} not found'.format(request_id))
raise oscerr.WrongArgs(f'Request {request_id} not found')

for review in req.reviews:
if review.by_group == by_group and \

@ -434,7 +434,7 @@ class StagingAPI(object):
request_id = int(request.get('id'))
action = request.find('action')
if action is None:
msg = 'Request {} has no action'.format(request_id)
msg = f'Request {request_id} has no action'
raise oscerr.WrongArgs(msg)

# Where are we targeting the package

@ -511,7 +511,7 @@ class StagingAPI(object):
else:
# Supersedes request is from the same project
if request_new.find('./action/source').get('project') == request_old.find('./action/source').get('project'):
message = 'sr#{} has newer source and is from the same project'.format(request_new.get('id'))
message = f"sr#{request_new.get('id')} has newer source and is from the same project"

self.rm_from_prj(stage_info['prj'], request_id=stage_info['rq_id'])
self.do_change_review_state(stage_info['rq_id'], 'declined',

@ -587,8 +587,8 @@ class StagingAPI(object):
requests = []

# xpath query, using the -m, -r, -s options
where = "@by_group='{}' and @state='new'".format(self.cstaging_group)
target = "target[@project='{}']".format(self.project)
where = f"@by_group='{self.cstaging_group}' and @state='new'"
target = f"target[@project='{self.project}']"

query = {'match': f"state/@name='review' and review[{where}] and {target}"}
if query_extra is not None:

@ -763,13 +763,13 @@ class StagingAPI(object):

def format_review(self, review):
if review.get('by_group'):
return 'group:{}'.format(review.get('by_group'))
return f"group:{review.get('by_group')}"
if review.get('by_user'):
return review.get('by_user')
if review.get('by_package'):
return 'package:{}'.format(review.get('by_package'))
return f"package:{review.get('by_package')}"
if review.get('by_project'):
return 'project:{}'.format(review.get('by_project'))
return f"project:{review.get('by_project')}"
raise oscerr.WrongArgs('Invalid review')

def job_history_fail_count(self, history):

@ -806,7 +806,7 @@ class StagingAPI(object):
if size:
offset += int(size[0])

query = {'nostream': '1', 'start': '%s' % offset}
query = {'nostream': '1', 'start': f'{offset}'}
if last:
query['last'] = 1
log = StringIO()
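
Aside: a bare replacement field such as f'{offset}' is equivalent to str(offset), so conversions like the one above are purely cosmetic:

    offset = 4096
    query = {'nostream': '1', 'start': f'{offset}'}
    assert query['start'] == str(offset)  # a bare {offset} field is just str(offset)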

@ -904,7 +904,7 @@ class StagingAPI(object):

req = get_request(self.apiurl, str(request_id))
if not req:
raise oscerr.WrongArgs('Request {} not found'.format(request_id))
raise oscerr.WrongArgs(f'Request {request_id} not found')

act = req.get_actions('submit')
if act:

@ -949,7 +949,7 @@ class StagingAPI(object):
if '_multibuild' in filelist:
return []

mainspec = "{}{}".format(package, '.spec')
mainspec = f"{package}.spec"
if mainspec in filelist:
filelist.remove(mainspec)
for file in filelist:

@ -1033,7 +1033,7 @@ class StagingAPI(object):
# dynamically generated and static baselibs.conf.
if self.is_adi_project(project):
baselibs = False
specfile = source_file_load(self.apiurl, src_prj, src_pkg, '{}.spec'.format(src_pkg), src_rev)
specfile = source_file_load(self.apiurl, src_prj, src_pkg, f'{src_pkg}.spec', src_rev)
if specfile and 'baselibs.conf' in specfile:
baselibs = True
else:

@ -1047,7 +1047,7 @@ class StagingAPI(object):
http_PUT(url, data=ET.tostring(root))

if baselibs is False:
specfile = source_file_load(self.apiurl, src_prj, src_pkg, '{}.spec'.format(sub_pkg), src_rev)
specfile = source_file_load(self.apiurl, src_prj, src_pkg, f'{sub_pkg}.spec', src_rev)
if specfile and 'baselibs.conf' in specfile:
baselibs = True

@ -1062,11 +1062,11 @@ class StagingAPI(object):

def ensure_staging_archs(self, project):
meta = ET.parse(http_GET(self.project_meta_url(project)))
repository = meta.find('repository[@name="{}"]'.format(self.cmain_repo))
repository = meta.find(f'repository[@name="{self.cmain_repo}"]')

changed = False
for arch in self.cstaging_archs:
if not repository.xpath('./arch[text()="{}"]'.format(arch)):
if not repository.xpath(f'./arch[text()="{arch}"]'):
elm = ET.SubElement(repository, 'arch')
elm.text = arch
changed = True

@ -1083,18 +1083,18 @@ class StagingAPI(object):
def prj_from_letter(self, letter):
if ':' in letter: # not a letter
return letter
return '{}:{}'.format(self.cstaging, letter)
return f'{self.cstaging}:{letter}'

def adi_prj_from_number(self, number):
if ':' in str(number):
return number
return '{}:adi:{}'.format(self.cstaging, number)
return f'{self.cstaging}:adi:{number}'

def list_requests_in_prj(self, project):
where = "@by_project='%s'+and+@state='new'" % project
where = f"@by_project='{project}'+and+@state='new'"

url = self.makeurl(['search', 'request', 'id'],
"match=state/@name='review'+and+review[%s]" % where)
f"match=state/@name='review'+and+review[{where}]")
f = http_GET(url)
root = ET.parse(f).getroot()
list = []

@ -1111,7 +1111,7 @@ class StagingAPI(object):
"""
req = get_request(self.apiurl, str(request_id))
if not req:
raise oscerr.WrongArgs('Request {} not found'.format(request_id))
raise oscerr.WrongArgs(f'Request {request_id} not found')
for i in req.reviews:
if by_project and i.by_project == by_project and i.state == 'new':
return

@ -1131,7 +1131,7 @@ class StagingAPI(object):
if by_group:
query['by_group'] = by_group
if not msg:
msg = 'Being evaluated by group "{}"'.format(by_group)
msg = f'Being evaluated by group "{by_group}"'
if not query:
raise oscerr.WrongArgs('We need a group or a project')
query['cmd'] = 'addreview'

@ -1215,7 +1215,7 @@ class StagingAPI(object):

version = None

specfile = source_file_load(self.apiurl, project, package, '{}.spec'.format(package))
specfile = source_file_load(self.apiurl, project, package, f'{package}.spec')
if specfile:
try:
version = re.findall('^Version:(.*)', specfile, re.MULTILINE)[0].strip()

@ -1227,7 +1227,7 @@ class StagingAPI(object):
"""
Return the version of a built rpm file
"""
url = self.makeurl(['build', project, repository, arch, '_repository', "%s?view=fileinfo" % rpm])
url = self.makeurl(['build', project, repository, arch, '_repository', f"{rpm}?view=fileinfo"])
try:
return ET.parse(http_GET(url)).getroot().find('version').text
except HTTPError as e:

@ -1289,7 +1289,7 @@ class StagingAPI(object):
return results

def is_repo_dirty(self, project, repository):
url = self.makeurl(['build', project, '_result?code=broken&repository=%s' % repository])
url = self.makeurl(['build', project, f'_result?code=broken&repository={repository}'])
root = ET.parse(http_GET(url)).getroot()
for repo in root.findall('result'):
repostate = repo.get('state', 'missing')

@ -1331,10 +1331,10 @@ class StagingAPI(object):
u = self.makeurl(['build', prj], query=query)

try:
print("tried to trigger rebuild for project '%s' package '%s'" % (prj, pkg))
print(f"tried to trigger rebuild for project '{prj}' package '{pkg}'")
http_POST(u)
except HTTPError:
print("could not trigger rebuild for project '%s' package '%s'" % (prj, pkg))
print(f"could not trigger rebuild for project '{prj}' package '{pkg}'")

def _candidate_adi_project(self):
"""Decide a candidate name for an ADI project."""

@ -1349,7 +1349,7 @@ class StagingAPI(object):

def update_adi_frozenlinks(self, name, src_prj):
xpath = {
'package': "@project='%s' and devel/@project='%s'" % (self.project, src_prj),
'package': f"@project='{self.project}' and devel/@project='{src_prj}'",
}
collection = search(self.apiurl, **xpath)['package']

@ -1383,10 +1383,10 @@ class StagingAPI(object):

adi_projects = self.get_adi_projects()
if name in adi_projects:
raise Exception('Project {} already exist'.format(name))
raise Exception(f'Project {name} already exist')

if use_frozenlinks:
linkproject = '<link project="{}"/>'.format(self.project)
linkproject = f'<link project="{self.project}"/>'
repository = '<repository name="standard" rebuild="direct" linkedbuild="all">'
else:
linkproject = ''

@ -1471,7 +1471,7 @@ class StagingAPI(object):
name.text = check

meta = ET.parse(http_GET(self.project_meta_url(project)))
repository = meta.find('repository[@name="{}"]'.format(self.cmain_repo))
repository = meta.find(f'repository[@name="{self.cmain_repo}"]')

for arch_element in repository.findall('arch'):
architecture = arch_element.text

@ -1480,7 +1480,7 @@ class StagingAPI(object):
http_POST(url, data=ET.tostring(root))

def register_new_staging_project(self, name):
data = '<workflow><staging_project>{}</staging_project></workflow>'.format(name)
data = f'<workflow><staging_project>{name}</staging_project></workflow>'
url = self.makeurl(['staging', self.project, 'staging_projects'])
try:
http_POST(url, data=data)

@ -1492,7 +1492,7 @@ class StagingAPI(object):
def is_user_member_of(self, user, group):
root = ET.fromstring(get_group(self.apiurl, group))

if root.findall("./person/person[@userid='%s']" % user):
if root.findall(f"./person/person[@userid='{user}']"):
return True
else:
return False

@ -1520,7 +1520,7 @@ class StagingAPI(object):
if self.rings:
# Determine if staging is bootstrapped.
meta = self.get_prj_meta(project)
xpath = 'link[@project="{}"]'.format(self.rings[0])
xpath = f'link[@project="{self.rings[0]}"]'
return meta.find(xpath) is not None

return False

@ -25,7 +25,7 @@ class UnignoreCommand(object):
else:
for request_id in RequestFinder.find_sr(requests, self.api):
if request_id in requests_ignored.keys():
print('{}: unignored'.format(request_id))
print(f'{request_id}: unignored')
del requests_ignored[request_id]
self.api.del_ignored_request(request_id)
self.comment.add_comment(request_id=str(request_id), comment=self.MESSAGE)

@ -45,7 +45,7 @@ class UnselectCommand(object):
if cleanup:
obsolete = self.api.project_status_requests('obsolete', self.filter_obsolete)
if len(obsolete) > 0:
print('Cleanup {} obsolete requests'.format(len(obsolete)))
print(f'Cleanup {len(obsolete)} obsolete requests')
packages += tuple(obsolete)

affected_projects = set()

@ -53,7 +53,7 @@ class UnselectCommand(object):
self.api).items():
staging_project = request_project['staging']
affected_projects.add(staging_project)
print('Unselecting "{}" from "{}"'.format(request, staging_project))
print(f'Unselecting "{request}" from "{staging_project}"')
self.api.rm_from_prj(staging_project, request_id=request)

req = get_request(self.api.apiurl, str(request))

@ -170,12 +170,12 @@ def mail_send(apiurl, project, to, subject, body, from_key='maintainer',
if from_key is None:
sender = entity_email(apiurl, conf.get_apiurl_usr(apiurl), include_name=True)
else:
sender = config['mail-{}'.format(from_key)]
sender = config[f'mail-{from_key}']

if '@' not in to:
to = config['mail-{}'.format(to)]
to = config[f'mail-{to}']

followup_to = config.get('mail-{}'.format(followup_to_key))
followup_to = config.get(f'mail-{followup_to_key}')
relay = config.get('mail-relay', 'relay.suse.de')

mail_send_with_details(text=body, subject=subject, relay=relay, sender=sender,

@ -46,7 +46,7 @@ class CommandLineInterface(ToolBase.CommandLineInterface):
@cmdln.option('-d', '--dry', help='no modifications uploaded')
@cmdln.option('-p', '--project', help='target project')
@cmdln.option('-g', '--git-url', help='git repository for target project')
@cmdln.option('-s', '--scope', help='scope on which to operate ({}, staging:$letter)'.format(', '.join(SCOPES)))
@cmdln.option('-s', '--scope', help=f"scope on which to operate ({', '.join(SCOPES)}, staging:$letter)")
@cmdln.option('--no-checkout', action='store_true', help='reuse checkout in cache')
@cmdln.option('--stop-after-solve', action='store_true', help='only create group files')
@cmdln.option('--staging', help='Only solve that one staging')

@ -115,4 +115,4 @@ class CommandLineInterface(ToolBase.CommandLineInterface):
elif scope == 'ring1':
return solve_project(api.rings[1], scope)
else:
raise ValueError('scope "{}" must be one of: {}'.format(scope, ', '.join(self.SCOPES)))
raise ValueError(f"scope \"{scope}\" must be one of: {', '.join(self.SCOPES)}")
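
Aside: backslash escapes like \" are fine in the literal parts of an f-string, as in the hunk above; before Python 3.12 a backslash may not appear inside the {...} fields themselves. A small sketch with a hypothetical SCOPES tuple:

    SCOPES = ('all', 'target', 'rings')  # hypothetical scope list
    scope = 'bogus'
    # \" escapes the quote in the literal part; the join stays inside {...}.
    print(f"scope \"{scope}\" must be one of: {', '.join(SCOPES)}")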

@ -44,7 +44,7 @@ def copy_directory_contents(source, destination, ignore_list=[]):

def change_extension(path, original, final):
for name in glob.glob(os.path.join(path, '*{}'.format(original))):
for name in glob.glob(os.path.join(path, f'*{original}')):
# Assumes the extension is only found at the end.
os.rename(name, name.replace(original, final))

@ -89,7 +89,7 @@ class Group(object):

def _verify_solved(self):
if not self.solved:
raise Exception('group {} not solved'.format(self.name))
raise Exception(f'group {self.name} not solved')

def inherit(self, group):
for arch in self.architectures:

@ -144,7 +144,7 @@ class Group(object):
jobs = list(self.pkglist.lockjobs[arch])
sel = pool.select(str(n), solv.Selection.SELECTION_NAME)
if sel.isempty():
self.logger.debug('{}.{}: package {} not found'.format(self.name, arch, n))
self.logger.debug(f'{self.name}.{arch}: package {n} not found')
self.not_found.setdefault(n, set()).add(arch)
return
else:

@ -168,14 +168,14 @@ class Group(object):
for s in self.silents:
sel = pool.select(str(s), solv.Selection.SELECTION_NAME | solv.Selection.SELECTION_FLAT)
if sel.isempty():
self.logger.warning('{}.{}: silent package {} not found'.format(self.name, arch, s))
self.logger.warning(f'{self.name}.{arch}: silent package {s} not found')
else:
jobs += sel.jobs(solv.Job.SOLVER_INSTALL)

problems = solver.solve(jobs)
if problems:
for problem in problems:
msg = 'unresolvable: {}:{}.{}: {}'.format(self.name, n, arch, problem)
msg = f'unresolvable: {self.name}:{n}.{arch}: {problem}'
self.logger.debug(msg)
self.unresolvable[arch][n] = str(problem)
return

@ -361,7 +361,7 @@ class Group(object):
root.append(c)

if arch != '*':
ET.SubElement(root, 'conditional', {'name': 'only_{}'.format(arch)})
ET.SubElement(root, 'conditional', {'name': f'only_{arch}'})
packagelist = ET.SubElement(root, 'packagelist', {'relationship': 'recommends'})

missing = dict()

@ -372,14 +372,14 @@ class Group(object):
if name in self.silents:
continue
if name in missing:
msg = ' {} not found on {}'.format(name, ','.join(sorted(missing[name])))
msg = f" {name} not found on {','.join(sorted(missing[name]))}"
if ignore_broken and name not in self.required:
c = ET.Comment(msg)
packagelist.append(c)
continue
name = msg
if name in unresolvable:
msg = ' {} uninstallable: {}'.format(name, unresolvable[name])
msg = f' {name} uninstallable: {unresolvable[name]}'
if ignore_broken and name not in self.required:
c = ET.Comment(msg)
packagelist.append(c)

@ -393,7 +393,7 @@ class Group(object):
attrs['supportstatus'] = status
ET.SubElement(packagelist, 'package', attrs)
if name in packages and packages[name]:
c = ET.Comment(' reason: {} '.format(packages[name]))
c = ET.Comment(f' reason: {packages[name]} ')
packagelist.append(c)

return root

@ -129,7 +129,7 @@ class PkgListGen(ToolBase.ToolBase):
for name in self.groups:
group = self.groups[name]
group.solved_packages = dict()
fn = '{}.group'.format(group.name)
fn = f'{group.name}.group'
with open(os.path.join(self.output_dir, fn), 'w') as fh:
for arch in archs:
x = group.toxml(arch, group.ignore_broken, None)

@ -189,7 +189,7 @@ class PkgListGen(ToolBase.ToolBase):
if not group.solved:
continue
summary[name] = group.summary()
fn = '{}.group'.format(group.name)
fn = f'{group.name}.group'
with open(os.path.join(self.output_dir, fn), 'w') as fh:
comment = group.comment
for arch in archs:

@ -247,7 +247,7 @@ class PkgListGen(ToolBase.ToolBase):
tocheck.add(s.name)

for locale in self.locales:
id = pool.str2id('locale({})'.format(locale))
id = pool.str2id(f'locale({locale})')
for s in pool.whatprovides(id):
tocheck_locales.add(s.name)

@ -283,7 +283,7 @@ class PkgListGen(ToolBase.ToolBase):
continue
s = f'repo-{project}-{reponame}-{arch}-{state}.solv'
if not repo.add_solv(s):
raise MismatchedRepoException('failed to add repo {}/{}/{}'.format(project, reponame, arch))
raise MismatchedRepoException(f'failed to add repo {project}/{reponame}/{arch}')
for solvable in repo.solvables_iter():
if ignore_conflicts:
solvable.unset(solv.SOLVABLE_CONFLICTS)

@ -432,11 +432,11 @@ class PkgListGen(ToolBase.ToolBase):
# Repo might not have this architecture
continue

repo_solv_name = 'repo-{}-{}-{}.solv'.format(project, repo, arch)
repo_solv_name = f'repo-{project}-{repo}-{arch}.solv'
# Would be preferable to include hash in name, but cumbersome to handle without
# reworking a fair bit since the state needs to be tracked.
solv_file = os.path.join(CACHEDIR, repo_solv_name)
solv_file_hash = '{}::{}'.format(solv_file, state)
solv_file_hash = f'{solv_file}::{state}'
if os.path.exists(solv_file) and os.path.exists(solv_file_hash):
# Solve file exists and hash unchanged, skip updating solv.
self.logger.debug('skipping solv generation for {} due to matching state {}'.format(

@ -464,7 +464,7 @@ class PkgListGen(ToolBase.ToolBase):
pool.setarch()

# we need some progress in the debug output - or gocd gets nervous
self.logger.debug('checking {}'.format(oldrepo))
self.logger.debug(f'checking {oldrepo}')
oldsysrepo = file_utils.add_susetags(pool, oldrepo)

for arch in self.all_architectures:

@ -476,7 +476,7 @@ class PkgListGen(ToolBase.ToolBase):
fn = f'repo-{project}-{repo}-{arch}-{state}.solv'
r = pool.add_repo('/'.join([project, repo]))
if not r.add_solv(fn):
raise MismatchedRepoException('failed to add repo {}/{}/{}.'.format(project, repo, arch))
raise MismatchedRepoException(f'failed to add repo {project}/{repo}/{arch}.')

pool.createwhatprovides()

@ -534,7 +534,7 @@ class PkgListGen(ToolBase.ToolBase):
if not repo_output:
print('#', repo, file=output)
repo_output = True
print('Provides: weakremover({})'.format(name), file=output)
print(f'Provides: weakremover({name})', file=output)
else:
jarch = ' '.join(sorted(drops[name]['archs']))
exclusives.setdefault(jarch, []).append(name)

@ -543,9 +543,9 @@ class PkgListGen(ToolBase.ToolBase):
if not repo_output:
print('#', repo, file=output)
repo_output = True
print('%ifarch {}'.format(arch), file=output)
print(f'%ifarch {arch}', file=output)
for name in sorted(exclusives[arch]):
print('Provides: weakremover({})'.format(name), file=output)
print(f'Provides: weakremover({name})', file=output)
print('%endif', file=output)
output.flush()

@ -628,7 +628,7 @@ class PkgListGen(ToolBase.ToolBase):

def build_stub(self, destination, extension):
with open(os.path.join(destination, '.'.join(['stub', extension])), 'w+') as f:
f.write('# prevent building single {} files twice\n'.format(extension))
f.write(f'# prevent building single {extension} files twice\n')
f.write('Name: stub\n')
f.write('Version: 0.0\n')

@ -645,7 +645,7 @@ class PkgListGen(ToolBase.ToolBase):
package.commit(msg='Automatic update', skip_local_service_run=True)

def replace_product_version(self, product_file, product_version):
product_version = '<version>{}</version>'.format(product_version)
product_version = f'<version>{product_version}</version>'
lines = open(product_file).readlines()
new_lines = []
for line in lines:

@ -670,7 +670,7 @@ class PkgListGen(ToolBase.ToolBase):
self.all_architectures = target_config.get('pkglistgen-archs').split(' ')
self.use_newest_version = str2bool(target_config.get('pkglistgen-use-newest-version', 'False'))
self.repos = self.expand_repos(project, main_repo)
logging.debug('[{}] {}/{}: update and solve'.format(scope, project, main_repo))
logging.debug(f'[{scope}] {project}/{main_repo}: update and solve')

group = target_config.get('pkglistgen-group', '000package-groups')
product = target_config.get('pkglistgen-product', '000product')

@ -691,7 +691,7 @@ class PkgListGen(ToolBase.ToolBase):
root = ET.fromstringlist(show_results_meta(api.apiurl, project, product,
repository=[main_repo], multibuild=True))
if len(root.xpath('result[@state="building"]')) or len(root.xpath('result[@state="dirty"]')):
logging.info('{}/{} build in progress'.format(project, product))
logging.info(f'{project}/{product} build in progress')
return
if git_url:
if os.path.exists(cache_dir + "/.git"):

@ -711,21 +711,21 @@ class PkgListGen(ToolBase.ToolBase):
else:
url = api.makeurl(['source', project])
packages = ET.parse(http_GET(url)).getroot()
if packages.find('entry[@name="{}"]'.format(product)) is None:
if packages.find(f'entry[@name="{product}"]') is None:
if not self.dry_run:
undelete_package(api.apiurl, project, product, 'revive')
# TODO disable build.
logging.info('{} undeleted, skip dvd until next cycle'.format(product))
logging.info(f'{product} undeleted, skip dvd until next cycle')
return

drop_list = api.item_exists(project, oldrepos)
if drop_list and not only_release_packages:
checkout_list.append(oldrepos)

if packages.find('entry[@name="{}"]'.format(release)) is None:
if packages.find(f'entry[@name="{release}"]') is None:
if not self.dry_run:
undelete_package(api.apiurl, project, release, 'revive')
logging.info('{} undeleted, skip dvd until next cycle'.format(release))
logging.info(f'{release} undeleted, skip dvd until next cycle')
return

if not no_checkout:

@ -743,7 +743,7 @@ class PkgListGen(ToolBase.ToolBase):
self.output_dir = product_dir

if not no_checkout and not git_url:
logging.debug('Skipping checkout of {}'.format(project))
logging.debug(f'Skipping checkout of {project}')
for package in checkout_list:
checkout_package(api.apiurl, project, package, expand_link=True,
prj_dir=cache_dir, outdir=os.path.join(cache_dir, package))

@ -833,7 +833,7 @@ class PkgListGen(ToolBase.ToolBase):
logging.debug(subprocess.check_output(
[PRODUCT_SERVICE, product_file, product_dir, project], encoding='utf-8'))

for delete_kiwi in target_config.get('pkglistgen-delete-kiwis-{}'.format(scope), '').split(' '):
for delete_kiwi in target_config.get(f'pkglistgen-delete-kiwis-{scope}', '').split(' '):
delete_kiwis = glob.glob(os.path.join(product_dir, delete_kiwi))
file_utils.unlink_list(product_dir, delete_kiwis)
if scope == 'staging':

@ -95,7 +95,7 @@ def parse_repomd(repo, baseurl):
sha = hashlib.sha256(primary.content).hexdigest()

if sha != sha_expected:
raise Exception('checksums do not match {} != {}'.format(sha, sha_expected))
raise Exception(f'checksums do not match {sha} != {sha_expected}')

os.lseek(f.fileno(), 0, os.SEEK_SET)
f.write(primary.content)

@ -165,13 +165,13 @@ def print_repo_delta(pool, repo2, packages_file):
present = dict()
for s in pool.solvables_iter():
if s.repo != repo2:
key = '{}/{}'.format(s.name, s.arch)
key = f'{s.name}/{s.arch}'
present.setdefault(key, {})
present[key][s.evr] = s.repo
for s in repo2.solvables:
if s.arch == 'src':
continue
key = '{}/{}'.format(s.name, s.arch)
key = f'{s.name}/{s.arch}'
if present.get(key, {}).get(s.evr):
continue
elif key not in present:

@ -277,7 +277,7 @@ def update_project(apiurl, project, fixate=None):

if opts.get('refresh', False):
opts['build'] = dump_solv_build(opts['url'])
path = '{}_{}.packages'.format(key, opts['build'])
path = f"{key}_{opts['build']}.packages"
else:
path = key + '.packages'
packages_file = os.path.join(repo_dir, path)

@ -44,13 +44,13 @@ class RepoChecker():
if not repository:
repository = self.project_repository(project)
if not repository:
self.logger.error('a repository must be specified via OSRT:Config main-repo for {}'.format(project))
self.logger.error(f'a repository must be specified via OSRT:Config main-repo for {project}')
return
self.repository = repository

archs = target_archs(self.apiurl, project, repository)
if not len(archs):
self.logger.debug('{} has no relevant architectures'.format(project))
self.logger.debug(f'{project} has no relevant architectures')
return None

for arch in archs:

@ -79,7 +79,7 @@ class RepoChecker():
continue
if comment.get('package') in comments:
continue
self.logger.info("Removing comment for package {}".format(comment.get('package')))
self.logger.info(f"Removing comment for package {comment.get('package')}")
url = makeurl(self.apiurl, ['comment', comment.get('id')])
http_DELETE(url)

@ -101,7 +101,7 @@ class RepoChecker():

if oldcomment:
commentapi.delete(oldcomment['id'])
self.logger.debug("Adding comment to {}/{}".format(self.project, package))
self.logger.debug(f"Adding comment to {self.project}/{package}")
commentapi.add_comment(project_name=self.project, package_name=package, comment=newcomment)

def _split_and_filter(self, output):

@ -123,7 +123,7 @@ class RepoChecker():
def project_repository(self, project):
repository = Config.get(self.apiurl, project).get('main-repo')
if not repository:
self.logger.debug('no main-repo defined for {}'.format(project))
self.logger.debug(f'no main-repo defined for {project}')

search_project = 'openSUSE:Factory'
for search_repository in ('snapshot', 'standard'):

@ -142,7 +142,7 @@ class RepoChecker():
return

state_yaml = yaml.dump(state, default_flow_style=False)
comment = 'Updated rebuild infos for {}/{}/{}'.format(self.project, self.repository, self.arch)
comment = f'Updated rebuild infos for {self.project}/{self.repository}/{self.arch}'
source_file_ensure(self.apiurl, self.store_project, self.store_package,
self.store_filename, state_yaml, comment=comment)

@ -157,7 +157,7 @@ class RepoChecker():
for rpm, rcode in buildresult.items():
if rcode != code:
continue
source = "{}/{}/{}/{}".format(self.project, self.repository, self.arch, rpm)
source = f"{self.project}/{self.repository}/{self.arch}/{rpm}"
if source not in oldstate[code]:
oldstate[code][source] = str(datetime.now())

@ -165,7 +165,7 @@ class RepoChecker():
config = Config.get(self.apiurl, project)

oldstate = None
self.store_filename = 'rebuildpacs.{}-{}.yaml'.format(project, repository)
self.store_filename = f'rebuildpacs.{project}-{repository}.yaml'
if self.store_project and self.store_package:
state_yaml = source_file_load(self.apiurl, self.store_project, self.store_package,
self.store_filename)

@ -244,7 +244,7 @@ class RepoChecker():
config.get(f'installcheck-ignore-conflicts-{arch}', '').split()

for package, entry in parsed.items():
source = "{}/{}/{}/{}".format(project, repository, arch, entry['source'])
source = f"{project}/{repository}/{arch}/{entry['source']}"
per_source.setdefault(source, {'output': [], 'buildresult': buildresult.get(entry['source'], 'gone'), 'ignored': True})
per_source[source]['output'].extend(entry['output'])
if package not in ignore_conflicts:

@ -255,7 +255,7 @@ class RepoChecker():
for source in sorted(per_source):
if not len(per_source[source]['output']):
continue
self.logger.debug("{} builds: {}".format(source, per_source[source]['buildresult']))
self.logger.debug(f"{source} builds: {per_source[source]['buildresult']}")
self.logger.debug(" " + "\n ".join(per_source[source]['output']))
if per_source[source]['buildresult'] != 'succeeded': # nothing we can do
continue

@ -276,7 +276,7 @@ class RepoChecker():
'rebuild': str(datetime.now())}

for source in list(oldstate['check']):
if not source.startswith('{}/{}/{}/'.format(project, repository, arch)):
if not source.startswith(f'{project}/{repository}/{arch}/'):
continue
code = buildresult.get(os.path.basename(source), 'gone')
if code == 'gone' or code == 'excluded':

@ -319,7 +319,7 @@ class RepoChecker():
m = hashlib.sha256()
for bdep in sorted(infos[package]['deps']):
m.update(bytes(bdep + '-' + infos[package]['deps'][bdep], 'utf-8'))
state_key = '{}/{}/{}/{}'.format(project, repository, arch, package)
state_key = f'{project}/{repository}/{arch}/{package}'
olddigest = oldstate['leafs'].get(state_key, {}).get('buildinfo')
if olddigest == m.hexdigest():
continue

@ -19,7 +19,7 @@ class Requestfinder(ToolBase.ToolBase):

def fill_package_meta(self, project):
self.package_metas = dict()
url = osc.core.makeurl(self.apiurl, ['search', 'package'], "match=[@project='%s']" % project)
url = osc.core.makeurl(self.apiurl, ['search', 'package'], f"match=[@project='{project}']")
root = ET.fromstring(self.cached_GET(url))
for p in root.findall('package'):
name = p.attrib['name']

@ -70,7 +70,7 @@ class CommandLineInterface(ToolBase.CommandLineInterface):
return tool

def _load_settings(self, settings, name):
section = 'settings {}'.format(name)
section = f'settings {name}'
for option in settings.keys():
if self.cp.has_option(section, option):
settings[option] = self.cp.get(section, option).replace('\n', ' ')

@ -209,7 +209,7 @@ class CommandLineInterface(ToolBase.CommandLineInterface):
rqs = self.tool.find_requests(settings)
for r in rqs:
self.print_actions(r)
print("osc rq {} -m '{}' {}".format(settings['action'], settings['message'], r.reqid))
print(f"osc rq {settings['action']} -m '{settings['message']}' {r.reqid}")

def help_examples(self):
return """$ cat > ~/.config/opensuse-release-tools/requestfinder.conf << EOF

@ -299,7 +299,7 @@ class SkippkgFinder(object):
if index in package_binaries:
selected_binarylist += package_binaries[index]
else:
logging.info("Can not find binary of %s" % index)
logging.info(f"Can not find binary of {index}")

# Some packages has been obsoleted by new updated package, however
# there are application still depend on old library when it builds

@ -85,7 +85,7 @@ class InstallChecker(object):
if provided_by.get('name') in built_binaries:
provided_found = True
else:
comments.append(' also provided by {} -> ignoring'.format(provided_by.get('name')))
comments.append(f" also provided by {provided_by.get('name')} -> ignoring")
alternative_found = True

if not alternative_found:

@ -104,7 +104,7 @@ class InstallChecker(object):
if result:
return True
else:
comments.append('Error: missing alternative provides for {}'.format(provide))
comments.append(f'Error: missing alternative provides for {provide}')
return False

@memoize(session=True)

@ -120,7 +120,7 @@ class InstallChecker(object):
def check_delete_request(self, req, to_ignore, to_delete, comments):
package = req.get('package')
if package in to_ignore or self.ignore_deletes:
self.logger.info('Delete request for package {} ignored'.format(package))
self.logger.info(f'Delete request for package {package} ignored')
return True

pkg_flavors = self.pkg_with_multibuild_flavors(package)

@ -190,10 +190,10 @@ class InstallChecker(object):

all_done = True
for arch in architectures:
pra = '{}/{}/{}'.format(project, repository, arch)
pra = f'{project}/{repository}/{arch}'
buildid = self.buildid(project, repository, arch)
if not buildid:
self.logger.error('No build ID in {}'.format(pra))
self.logger.error(f'No build ID in {pra}')
return False
buildids[arch] = buildid
url = self.report_url(project, repository, arch, buildid)

@ -201,11 +201,11 @@ class InstallChecker(object):
root = ET.parse(osc.core.http_GET(url)).getroot()
check = root.find('check[@name="installcheck"]/state')
if check is not None and check.text != 'pending':
self.logger.info('{} already "{}", ignoring'.format(pra, check.text))
self.logger.info(f'{pra} already "{check.text}", ignoring')
else:
all_done = False
except HTTPError:
self.logger.info('{} has no status report'.format(pra))
self.logger.info(f'{pra} has no status report')
all_done = False

if all_done and not force:

@ -218,7 +218,7 @@ class InstallChecker(object):
to_ignore = self.packages_to_ignore(project)
status = api.project_status(project)
if status is None:
self.logger.error('no project status for {}'.format(project))
self.logger.error(f'no project status for {project}')
return False

# collect packages to be deleted

@ -282,9 +282,9 @@ class InstallChecker(object):
if result:
self.report_state('success', self.gocd_url(), project, repository, buildids)
else:
result_comment.insert(0, 'Generated from {}\n'.format(self.gocd_url()))
result_comment.insert(0, f'Generated from {self.gocd_url()}\n')
self.report_state('failure', self.upload_failure(project, result_comment), project, repository, buildids)
self.logger.warning('Not accepting {}'.format(project))
self.logger.warning(f'Not accepting {project}')
return False

return result

@ -295,7 +295,7 @@ class InstallChecker(object):
osc.core.http_PUT(url, data='\n'.join(comment))

url = self.api.apiurl.replace('api.', 'build.')
return '{}/package/view_file/home:repo-checker/reports/{}'.format(url, project)
return f'{url}/package/view_file/home:repo-checker/reports/{project}'

def report_state(self, state, report_url, project, repository, buildids):
architectures = self.target_archs(project, repository)

@ -357,10 +357,10 @@ class InstallChecker(object):
return sorted(archs, reverse=True)

def install_check(self, directories, arch, whitelist, ignored_conflicts):
self.logger.info('install check: start (whitelist:{})'.format(','.join(whitelist)))
self.logger.info(f"install check: start (whitelist:{','.join(whitelist)})")
parts = installcheck(directories, arch, whitelist, ignored_conflicts)
if len(parts):
header = '### [install check & file conflicts for {}]'.format(arch)
header = f'### [install check & file conflicts for {arch}]'
return CheckResult(False, header + '\n\n' + ('\n' + ('-' * 80) + '\n\n').join(parts))

self.logger.info('install check: passed')

@ -373,7 +373,7 @@ class InstallChecker(object):
self.allowed_cycles.append(comma_list.split(','))

def cycle_check(self, project, repository, arch):
self.logger.info('cycle check: start %s/%s/%s' % (project, repository, arch))
self.logger.info(f'cycle check: start {project}/{repository}/{arch}')
comment = []

depinfo = builddepinfo(self.api.apiurl, project, repository, arch, order=False)

@ -387,7 +387,7 @@ class InstallChecker(object):
break
if not allowed:
cycled = [p.text for p in cycle.findall('package')]
comment.append('Package {} appears in cycle {}'.format(package, '/'.join(cycled)))
comment.append(f"Package {package} appears in cycle {'/'.join(cycled)}")

if len(comment):
# New cycles, post comment.
@ -25,7 +25,7 @@ class StagingReport(object):
package.get('package'),
package.get('repository'),
package.get('arch'))
text = '[%s](%s)' % (package.get('arch'), link)
text = f"[{package.get('arch')}]({link})"
return text

def old_enough(self, _date):
@ -56,13 +56,13 @@ class StagingReport(object):
groups[package.get('package')].append(package)

failing_lines = [
'* Build failed %s (%s)' % (key, ', '.join(self._package_url(p) for p in value))
f"* Build failed {key} ({', '.join(self._package_url(p) for p in value)})"
for key, value in groups.items()
]

report = '\n'.join(failing_lines[:MAX_LINES])
if len(failing_lines) > MAX_LINES:
report += '* and more (%s) ...' % (len(failing_lines) - MAX_LINES)
report += f'* and more ({len(failing_lines) - MAX_LINES}) ...'
return report

def report_checks(self, info):
@ -70,7 +70,7 @@ class StagingReport(object):
for check in info.findall('checks/check'):
state = check.find('state').text
links_state.setdefault(state, [])
links_state[state].append('[{}]({})'.format(check.get('name'), check.find('url').text))
links_state[state].append(f"[{check.get('name')}]({check.find('url').text})")

lines = []
failure = False
@ -78,14 +78,14 @@ class StagingReport(object):
if len(links) > MAX_LINES:
extra = len(links) - MAX_LINES
links = links[:MAX_LINES]
links.append('and {} more...'.format(extra))
links.append(f'and {extra} more...')

lines.append('- {}'.format(state))
lines.append(f'- {state}')
if state != 'success':
lines.extend([' - {}'.format(link) for link in links])
lines.extend([f' - {link}' for link in links])
failure = True
else:
lines[-1] += ': {}'.format(', '.join(links))
lines[-1] += f": {', '.join(links)}"

return '\n'.join(lines).strip(), failure
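Conversions such as f"[{package.get('arch')}]({link})" above also switch the outer quotes to double quotes: before Python 3.12, an expression inside an f-string may not reuse the quote character of the string itself. A short sketch under that assumption, with invented values:

whitelist = ['libfoo1', 'libbar2']  # invented values
# Outer double quotes let the embedded expression use single quotes.
msg = f"install check: start (whitelist:{','.join(whitelist)})"
assert msg == 'install check: start (whitelist:libfoo1,libbar2)'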
@ -98,10 +98,10 @@ class StagingHelper(object):
print("There is no support_pkg_rebuild file!")
return

logging.info('Gathering support package list from %s' % self.project)
logging.info(f'Gathering support package list from {self.project}')
support_pkgs = self.get_support_package_list(self.project, 'standard')
files = self.process_project_binarylist(self.project, 'standard', 'x86_64')
staging_projects = ["%s:%s" % (self.api.cstaging, p) for p in self.api.get_staging_projects_short()]
staging_projects = [f"{self.api.cstaging}:{p}" for p in self.api.get_staging_projects_short()]
cand_sources = defaultdict(list)
for stg in staging_projects:
status = self.api.project_status(stg, status=False)
@ -146,7 +146,7 @@ class StagingHelper(object):
need_rebuild = True

if need_rebuild and not self.api.is_repo_dirty(stgname, 'standard'):
logging.info('Rebuild %s' % stgname)
logging.info(f'Rebuild {stgname}')
osc.core.rebuild(self.apiurl, stgname, None, None, None)
stg.find('rebuild').text = 'unneeded'

@ -174,7 +174,7 @@ if __name__ == '__main__':
parser.add_argument('-d', '--debug', action='store_true',
help='print info useful for debuging')
parser.add_argument('-p', '--project', dest='project', metavar='PROJECT',
help='deafult project (default: %s)' % OPENSUSE,
help=f'deafult project (default: {OPENSUSE})',
default=OPENSUSE)

args = parser.parse_args()
@ -71,14 +71,14 @@ class TestCase(unittest.TestCase):
with open(OSCRC, 'w+') as f:
f.write('\n'.join([
'[general]',
'apiurl = {}'.format(APIURL),
f'apiurl = {APIURL}',
'http_debug = false',
'debug = false',
'cookiejar = {}'.format(OSCCOOKIEJAR),
'[{}]'.format(APIURL),
'user = {}'.format(userid),
f'cookiejar = {OSCCOOKIEJAR}',
f'[{APIURL}]',
f'user = {userid}',
'pass = opensuse',
'email = {}@example.com'.format(userid),
f'email = {userid}@example.com',
# allow plain http even if it is insecure; we're testing after all
'allow_http = 1',
# disable cert checking to allow self-signed certs
@ -175,10 +175,10 @@ class TestCase(unittest.TestCase):
for review in request.reviews:
for key, value in kwargs.items():
if hasattr(review, key) and getattr(review, key) == value[0]:
self.assertEqual(review.state, value[1], '{}={} not {}'.format(key, value[0], value[1]))
self.assertEqual(review.state, value[1], f'{key}={value[0]} not {value[1]}')
return review

self.fail('{} not found'.format(kwargs))
self.fail(f'{kwargs} not found')

def assertReviewScript(self, request_id, user, before, after, comment=None):
"""Asserts the review script pointed by the ``script`` attribute of the current test can
@ -356,16 +356,16 @@ class StagingWorkflow(ABC):
if name not in self.attr_types[namespace]:
self.attr_types[namespace].append(name)

meta = """
<namespace name='{}'>
meta = f"""
<namespace name='{namespace}'>
<modifiable_by user='Admin'/>
</namespace>""".format(namespace)
</namespace>"""
url = osc.core.makeurl(APIURL, ['attribute', namespace, '_meta'])
osc.core.http_PUT(url, data=meta)

meta = "<definition name='{}' namespace='{}'><description/>".format(name, namespace)
meta = f"<definition name='{name}' namespace='{namespace}'><description/>"
if values:
meta += "<count>{}</count>".format(values)
meta += f"<count>{values}</count>"
meta += "<modifiable_by role='maintainer'/></definition>"
url = osc.core.makeurl(APIURL, ['attribute', namespace, name, '_meta'])
osc.core.http_PUT(url, data=meta)
@ -418,11 +418,11 @@ class StagingWorkflow(ABC):
:param users: list of users to be in group
:type users: list(str)
"""
meta = """
meta = f"""
<group>
<title>{}</title>
<title>{name}</title>
</group>
""".format(name)
"""

if len(users):
root = ET.fromstring(meta)
@ -450,13 +450,13 @@ class StagingWorkflow(ABC):
"""
if name in self.users:
return
meta = """
meta = f"""
<person>
<login>{}</login>
<email>{}@example.com</email>
<login>{name}</login>
<email>{name}@example.com</email>
<state>confirmed</state>
</person>
""".format(name, name)
"""
self.users.append(name)
url = osc.core.makeurl(APIURL, ['person', name])
osc.core.http_PUT(url, data=meta)
@ -620,7 +620,7 @@ class StagingWorkflow(ABC):
:type namespace: str
"""
for name in self.attr_types[namespace]:
print('deleting attribute type {}:{}'.format(namespace, name))
print(f'deleting attribute type {namespace}:{name}')
url = osc.core.makeurl(APIURL, ['attribute', namespace, name, '_meta'])
self._safe_delete(url)
print('deleting namespace', namespace)
@ -695,7 +695,7 @@ class FactoryWorkflow(StagingWorkflow):
self.create_link(target_wine, self.projects['ring1'])

def create_staging(self, suffix, freeze=False, rings=None, with_repo=False):
staging_key = 'staging:{}'.format(suffix)
staging_key = f'staging:{suffix}'
# do not reattach if already present
if staging_key not in self.projects:
staging_name = self.project + ':Staging:' + suffix
@ -846,11 +846,11 @@ class Project(object):
:param with_repo: whether a repository should be created as part of the meta
:type with_repo: bool
"""
meta = """
<project name="{0}">
meta = f"""
<project name="{self.name}">
<title></title>
<description></description>
</project>""".format(self.name)
</project>"""

root = ET.fromstring(meta)
for group in reviewer.get('groups', []):
@ -983,11 +983,11 @@ class Package(object):
self.name = name
self.project = project

meta = """
<package project="{1}" name="{0}">
meta = f"""
<package project="{self.project.name}" name="{self.name}">
<title></title>
<description></description>
</package>""".format(self.name, self.project.name)
</package>"""

if devel_project:
root = ET.fromstring(meta)
@ -996,7 +996,7 @@ class Package(object):

url = osc.core.make_meta_url('pkg', (self.project.name, self.name), APIURL)
osc.core.http_PUT(url, data=meta)
print('created {}/{}'.format(self.project.name, self.name))
print(f'created {self.project.name}/{self.name}')
self.project.add_package(self)

# delete from instance
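The meta-template conversions above work because the f prefix applies to triple-quoted strings too, so a multi-line XML snippet can interpolate inline instead of calling .format() on its closing quote. A minimal sketch with an invented group name:

name = 'factory-staging'  # invented example value
meta = f"""
<group>
  <title>{name}</title>
</group>
"""
# The placeholder is filled in directly inside the triple-quoted template.
assert '<title>factory-staging</title>' in meta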
@ -64,7 +64,7 @@ class TestAccept(unittest.TestCase):
# check which id was added
new_id = (set(comments.keys()) - set(self.comments.keys())).pop()
comment = comments[new_id]['comment']
ncomment = 'Project "{}" accepted. '.format(self.prj)
ncomment = f'Project "{self.prj}" accepted. '
ncomment += "The following packages have been submitted to openSUSE:Factory: wine."
self.assertEqual(ncomment, comment)
@ -117,7 +117,7 @@ class TestApiCalls(OBSLocal.TestCase):

# Verify that review is closed
rq = self.winerq.xml()
xpath = "//review[@name='new' and @by_project='{}']".format(self.staging_b.name)
xpath = f"//review[@name='new' and @by_project='{self.staging_b.name}']"
self.assertIsNotNone(rq.xpath(xpath))

def test_add_sr(self):
@ -79,7 +79,7 @@ class TestCheckSource(OBSLocal.TestCase):
self.review_bot.check_requests()

review = self.assertReview(req_id, by_user=(self.bot_user, 'declined'))
self.assertIn('%s is not a devel project of %s' % (SRC_PROJECT, PROJECT), review.comment)
self.assertIn(f'{SRC_PROJECT} is not a devel project of {PROJECT}', review.comment)

@pytest.mark.usefixtures("required_source_maintainer")
def test_devel_project(self):
@ -192,11 +192,11 @@ class TestCheckSource(OBSLocal.TestCase):
review = self.assertReview(req.reqid, by_user=(self.bot_user, 'declined'))
add_role_req = get_request_list(self.wf.apiurl, SRC_PROJECT, req_state=['new'], req_type='add_role')[0]

self.assertIn('unless %s is a maintainer of %s' % (FACTORY_MAINTAINERS, SRC_PROJECT), review.comment)
self.assertIn('Created the add_role request %s' % add_role_req.reqid, review.comment)
self.assertIn(f'unless {FACTORY_MAINTAINERS} is a maintainer of {SRC_PROJECT}', review.comment)
self.assertIn(f'Created the add_role request {add_role_req.reqid}', review.comment)

self.assertEqual(add_role_req.actions[0].tgt_project, SRC_PROJECT)
self.assertEqual('Created automatically from request %s' % req.reqid, add_role_req.description)
self.assertEqual(f'Created automatically from request {req.reqid}', add_role_req.description)

# reopen request and do it again to test that new add_role request won't be created
req.change_state('new')
@ -247,11 +247,11 @@ class TestCheckSource(OBSLocal.TestCase):
review = self.assertReview(req.reqid, by_user=(self.bot_user, 'declined'))
add_role_req = get_request_list(self.wf.apiurl, SRC_PROJECT, req_state=['new'], req_type='add_role')[0]

self.assertIn('unless %s is a maintainer of %s' % (FACTORY_MAINTAINERS, SRC_PROJECT), review.comment)
self.assertIn('Created the add_role request %s' % add_role_req.reqid, review.comment)
self.assertIn(f'unless {FACTORY_MAINTAINERS} is a maintainer of {SRC_PROJECT}', review.comment)
self.assertIn(f'Created the add_role request {add_role_req.reqid}', review.comment)

self.assertEqual(add_role_req.actions[0].tgt_project, SRC_PROJECT)
self.assertEqual('Created automatically from request %s' % req.reqid, add_role_req.description)
self.assertEqual(f'Created automatically from request {req.reqid}', add_role_req.description)

@pytest.mark.usefixtures("default_config")
def test_bad_rpmlintrc(self):
@ -15,8 +15,8 @@ class TestComment(unittest.TestCase):
self.api = CommentAPI('bogus')
self.bot = type(self).__name__
self.comments = {
1: {'comment': '<!-- {} -->\n\nshort comment'.format(self.bot)},
2: {'comment': '<!-- {} foo=bar distro=openSUSE -->\n\nshort comment'.format(self.bot)}
1: {'comment': f'<!-- {self.bot} -->\n\nshort comment'},
2: {'comment': f'<!-- {self.bot} foo=bar distro=openSUSE -->\n\nshort comment'}
}

def test_truncate(self):
@ -52,7 +52,7 @@ handle
truncated = self.api.truncate(comment, length=i)
print('=' * 80)
print(truncated)
self.assertTrue(len(truncated) <= i, '{} <= {}'.format(len(truncated), i))
self.assertTrue(len(truncated) <= i, f'{len(truncated)} <= {i}')
self.assertEqual(truncated.count('<pre>'), truncated.count('</pre>'))
self.assertFalse(len(re.findall(r'</?\w+[^\w>]', truncated)))
tag_count = truncated.count('<pre>') + truncated.count('</pre>')
@ -25,7 +25,7 @@ class MockedContainerCleaner(ContainerCleaner):

return ret
else:
raise RuntimeError("Path %s not expected" % path)
raise RuntimeError(f"Path {path} not expected")

def getDirBinaries(self, path):
"""Mock certain OBS APIs returning a list of binaries"""
@ -37,7 +37,7 @@ class MockedContainerCleaner(ContainerCleaner):

return []
else:
raise RuntimeError("Path %s not expected" % path)
raise RuntimeError(f"Path {path} not expected")


class TestContainerCleaner(unittest.TestCase):
@ -28,13 +28,13 @@ class TestFreeze(OBSLocal.TestCase):

fp = self._get_fixture_path('staging-meta-for-bootstrap-copy.xml')
fc.prj = 'openSUSE:Factory:Staging:A'
fixture = subprocess.check_output('/usr/bin/xmllint --format %s' % fp, shell=True).decode('utf-8')
fixture = subprocess.check_output(f'/usr/bin/xmllint --format {fp}', shell=True).decode('utf-8')

f = tempfile.NamedTemporaryFile(delete=False)
f.write(fc.prj_meta_for_bootstrap_copy())
f.close()

output = subprocess.check_output('/usr/bin/xmllint --format %s' % f.name, shell=True).decode('utf-8')
output = subprocess.check_output(f'/usr/bin/xmllint --format {f.name}', shell=True).decode('utf-8')

for line in difflib.unified_diff(fixture.split("\n"), output.split("\n")):
print(line)
@ -189,7 +189,7 @@ class TestOrigin(OBSLocal.TestCase):
CommentAPI(self.wf.api.apiurl).add_comment(
request_id=request.reqid, comment=f'@{self.bot_user} change_devel')

comment = 'change_devel command by {}'.format('Admin')
comment = 'change_devel command by Admin'
else:
comment = 'only devel origin allowed'
@ -53,7 +53,7 @@ class ToTestManager(ToolBase.ToolBase):
self.api = StagingAPI(self.apiurl, project=project)

def version_file(self, target):
return 'version_%s' % target
return f'version_{target}'

def write_version_to_dashboard(self, target, version):
if self.dryrun or self.project.do_not_release:
@ -70,7 +70,7 @@ class ToTestManager(ToolBase.ToolBase):
r'.*-(?:Build|Snapshot)([0-9.]+)(?:-Media.*\.iso|\.docker\.tar\.xz|\.tar\.xz|\.raw\.xz|\.appx)', binary)
if result:
return result.group(1)
raise NotFoundException("can't find %s iso version" % project)
raise NotFoundException(f"can't find {project} iso version")

def version_from_totest_project(self):
if len(self.project.main_products):
@ -103,7 +103,7 @@ class ToTestManager(ToolBase.ToolBase):
result = re.match(r'.*-Build(.*)-Media1.report', binary)
if result:
return result.group(1)
raise NotFoundException("can't find %s ftp version" % project)
raise NotFoundException(f"can't find {project} ftp version")

# make sure to update the attribute as atomic as possible - as such
# only update the snapshot and don't erase anything else. The snapshots
@ -113,7 +113,7 @@ class ToTestManager(ToolBase.ToolBase):
status_dict = self.get_status_dict()
if status_dict.get(status) == snapshot:
return
self.logger.info('setting {} snapshot to {} (previously {})'.format(status, snapshot, status_dict.get(status)))
self.logger.info(f'setting {status} snapshot to {snapshot} (previously {status_dict.get(status)})')
if self.dryrun:
return
if status_dict.get(status) != snapshot:
@ -152,7 +152,7 @@ class ToTestManager(ToolBase.ToolBase):

url = self.api.makeurl(baseurl, query=query)
if self.dryrun or self.project.do_not_release:
self.logger.info('release %s/%s (%s)' % (project, package, query))
self.logger.info(f'release {project}/{package} ({query})')
else:
self.api.retried_POST(url)
@ -50,7 +50,7 @@ class ToTestPublisher(ToTestManager):
self.failed_ignored_jobs = []

if len(jobs) < self.project.jobs_num: # not yet scheduled
self.logger.warning('we have only %s jobs' % len(jobs))
self.logger.warning(f'we have only {len(jobs)} jobs')
return QAResult.inprogress

in_progress = False
@ -92,7 +92,7 @@ class ToTestPublisher(ToTestManager):
# remove flag - unfortunately can't delete comment unless admin
data = {'text': text}
if self.dryrun:
self.logger.info('Would label {} with: {}'.format(job['id'], text))
self.logger.info(f"Would label {job['id']} with: {text}")
else:
self.openqa.openqa_request(
'PUT', 'jobs/%s/comments/%d' % (job['id'], labeled), data=data)
@ -103,12 +103,12 @@ class ToTestPublisher(ToTestManager):
if not labeled and len(refs) > 0:
data = {'text': 'label:unknown_failure'}
if self.dryrun:
self.logger.info('Would label {} as unknown'.format(job['id']))
self.logger.info(f"Would label {job['id']} as unknown")
else:
self.openqa.openqa_request(
'POST', 'jobs/%s/comments' % job['id'], data=data)
'POST', f"jobs/{job['id']}/comments", data=data)

joburl = '%s/tests/%s' % (self.project.openqa_server, job['id'])
joburl = f"{self.project.openqa_server}/tests/{job['id']}"
self.logger.info('job %s failed, see %s', job['name'], joburl)

elif job['result'] == 'passed' or job['result'] == 'softfailed':
@ -137,7 +137,7 @@ class ToTestPublisher(ToTestManager):

self.logger.debug('Sending AMQP message')
inf = re.sub(r'ed$', '', str(current_result))
msg_topic = '%s.ttm.build.%s' % (self.project.base.lower(), inf)
msg_topic = f'{self.project.base.lower()}.ttm.build.{inf}'
msg_body = json.dumps({
'build': current_snapshot,
'project': self.project.name,
@ -158,9 +158,9 @@ class ToTestPublisher(ToTestManager):
notify_connection.close()
break
except pika.exceptions.ConnectionClosed as e:
self.logger.warning('Sending AMQP event did not work: %s. Retrying try %s out of %s' % (e, t, tries))
self.logger.warning(f'Sending AMQP event did not work: {e}. Retrying try {t} out of {tries}')
else:
self.logger.error('Could not send out AMQP event for %s tries, aborting.' % tries)
self.logger.error(f'Could not send out AMQP event for {tries} tries, aborting.')

def publish(self, project, force=False):
self.setup(project)
@ -179,7 +179,7 @@ class ToTestPublisher(ToTestManager):
current_snapshot = self.get_status('testing')

if self.get_status('publishing') == current_snapshot:
self.logger.info('{} is already publishing'.format(current_snapshot))
self.logger.info(f'{current_snapshot} is already publishing')
# migrating - if there is no published entry, the last publish call
# didn't wait for publish - and as such didn't set published state
if self.get_status('published') != current_snapshot:
@ -189,8 +189,8 @@ class ToTestPublisher(ToTestManager):
current_result = self.overall_result(current_snapshot)
current_qa_version = self.current_qa_version()

self.logger.info('current_snapshot {}: {}'.format(current_snapshot, str(current_result)))
self.logger.debug('current_qa_version {}'.format(current_qa_version))
self.logger.info(f'current_snapshot {current_snapshot}: {str(current_result)}')
self.logger.debug(f'current_qa_version {current_qa_version}')

self.send_amqp_event(current_snapshot, current_result)

@ -231,7 +231,7 @@ class ToTestPublisher(ToTestManager):

current_snapshot = self.get_status('publishing')
if self.dryrun:
self.logger.info('Publisher finished, updating published snapshot to {}'.format(current_snapshot))
self.logger.info(f'Publisher finished, updating published snapshot to {current_snapshot}')
return

self.update_status('published', current_snapshot)
@ -263,8 +263,8 @@ class ToTestPublisher(ToTestManager):
return

status_flag = 'published'
data = {'text': 'tag:{}:{}:{}'.format(snapshot, status_flag, status_flag)}
self.openqa.openqa_request('POST', 'groups/%s/comments' % group_id, data=data)
data = {'text': f'tag:{snapshot}:{status_flag}:{status_flag}'}
self.openqa.openqa_request('POST', f'groups/{group_id}/comments', data=data)

def openqa_group_id(self):
url = makeurl(self.project.openqa_server,
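Note that calls like self.logger.info('job %s failed, see %s', job['name'], joburl) above stay in %-style: when the values are passed as logger arguments, interpolation is deferred until the record is actually emitted, while an f-string is always built before the call. A sketch of the difference (logger name, job name, and URL below are illustrative):

import logging

logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger('totest')  # illustrative name

job, joburl = 'opensuse-dvd-x86_64', 'https://openqa.example.com/tests/1'

# Lazy: the message is only formatted if INFO records are emitted.
logger.info('job %s failed, see %s', job, joburl)

# Eager: the f-string is evaluated even though INFO is filtered out here.
logger.info(f'job {job} failed, see {joburl}')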
@ -38,7 +38,7 @@ class ToTestReleaser(ToTestManager):
return None

if testing_snapshot != self.get_status('failed') and testing_snapshot != self.get_status('published'):
self.logger.debug('Snapshot {} is still in progress'.format(testing_snapshot))
self.logger.debug(f'Snapshot {testing_snapshot} is still in progress')
return QAResult.inprogress

self.logger.info('testing snapshot %s', testing_snapshot)
@ -60,7 +60,7 @@ class ToTestReleaser(ToTestManager):

def release_version(self):
url = self.api.makeurl(['build', self.project.name, 'standard', self.project.arch,
'000release-packages:%s-release' % self.project.base])
f'000release-packages:{self.project.base}-release'])
f = self.api.retried_GET(url)
root = ET.parse(f).getroot()
for binary in root.findall('binary'):
@ -69,7 +69,7 @@ class ToTestReleaser(ToTestManager):
if result:
return result.group(1)

raise NotFoundException("can't find %s version" % self.project.name)
raise NotFoundException(f"can't find {self.project.name} version")

def version_from_project(self):
if not self.project.take_source_from_product:
@ -109,11 +109,11 @@ class ToTestReleaser(ToTestManager):

if any(failed):
self.logger.info(
'%s %s %s %s -> %s' % (project, package, repository, arch, failed[0].get('code')))
f"{project} {package} {repository} {arch} -> {failed[0].get('code')}")
return False

if not len(root.findall('result/status[@code="succeeded"]')):
self.logger.info('No "succeeded" for %s %s %s %s' % (project, package, repository, arch))
self.logger.info(f'No "succeeded" for {project} {package} {repository} {arch}')
return False

maxsize = self.maxsize_for_package(package, arch)
@ -220,12 +220,12 @@ class ToTestReleaser(ToTestManager):
return False

if len(self.project.livecd_products):
if not self.all_repos_done('%s:Live' % self.project.name):
if not self.all_repos_done(f'{self.project.name}:Live'):
return False

for product in self.project.livecd_products:
for arch in product.archs:
if not self.package_ok('%s:Live' % self.project.name, product.package,
if not self.package_ok(f'{self.project.name}:Live', product.package,
self.project.product_repo, arch):
return False

@ -300,7 +300,7 @@ class ToTestReleaser(ToTestManager):
snapshot = None
if snapshot:
release = self.project.snapshot_number_prefix + snapshot
self.logger.info('Updating snapshot %s' % snapshot)
self.logger.info(f'Updating snapshot {snapshot}')
else:
release = None
if not (self.dryrun or self.project.do_not_release):
@ -53,7 +53,7 @@ class ToTest(object):

self.jobs_num = 42
self.load_config(apiurl)
self.test_project = '%s:%s' % (project, self.test_subproject)
self.test_project = f'{project}:{self.test_subproject}'

def load_config(self, apiurl):
config = yaml.safe_load(attribute_value_load(apiurl, self.name, 'ToTestManagerConfig'))
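One caveat for applying the same conversion elsewhere, not exercised by the hunks above: %-formatting escapes a literal percent sign as %%, which becomes a plain % in an f-string, while literal braces must be doubled instead. A self-contained sketch:

percent = 'progress: %d%%' % 75      # '%%' is how %-formatting escapes '%'
fstring = f'progress: {75}%'         # a bare '%' needs no escape in an f-string
braces = f'a set literal: {{1, 2}}'  # literal braces are doubled instead
assert percent == fstring == 'progress: 75%'
assert braces == 'a set literal: {1, 2}'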