Use f-strings where possible

commit 730630f06f
parent 9ec6e64546

ReviewBot.py | 70
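The change is mechanical: %-interpolation and str.format() calls are rewritten as f-strings wherever the resulting string is unchanged. A minimal sketch of the pattern, using a made-up value rather than anything taken from the commit:

    # Illustrative only: all three spellings produce the same string.
    bot_name = 'ReviewBot'  # hypothetical value
    percent_style = '%s-override-group' % bot_name.lower()       # printf-style
    format_style = '{}-override-group'.format(bot_name.lower())  # str.format()
    fstring_style = f'{bot_name.lower()}-override-group'         # f-string
    assert percent_style == format_style == fstring_style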
@@ -121,9 +121,9 @@ class ReviewBot(object):
         self.request_default_return = None
         self.comment_handler = False
         self.override_allow = True
-        self.override_group_key = '{}-override-group'.format(self.bot_name.lower())
+        self.override_group_key = f'{self.bot_name.lower()}-override-group'
         self.request_age_min_default = 0
-        self.request_age_min_key = '{}-request-age-min'.format(self.bot_name.lower())
+        self.request_age_min_key = f'{self.bot_name.lower()}-request-age-min'
         self.lookup = PackageLookup(self.apiurl)

         self.load_config()
@@ -147,7 +147,7 @@ class ReviewBot(object):
                 return True
         except HTTPError as e:
             if e.code != 404:
-                self.logger.error('ERROR in URL %s [%s]' % (url, e))
+                self.logger.error(f'ERROR in URL {url} [{e}]')
                 raise
         return False

@@ -172,7 +172,7 @@ class ReviewBot(object):
     def review_mode(self, value: Union[ReviewChoices, str]) -> None:
         val = ReviewChoices(value)
         if val not in self.REVIEW_CHOICES:
-            raise ValueError("invalid review option: %s" % val)
+            raise ValueError(f"invalid review option: {val}")
         self._review_mode = val

     def set_request_ids(self, ids):
@@ -196,7 +196,7 @@ class ReviewBot(object):
         return_value = 0

         for req in self.requests:
-            self.logger.info("checking %s" % req.reqid)
+            self.logger.info(f"checking {req.reqid}")
             self.request = req

             # XXX: this is a hack. Annotating the request with staging_project.
@@ -225,7 +225,7 @@ class ReviewBot(object):
                 good = True

             if good is None:
-                self.logger.info("%s ignored" % req.reqid)
+                self.logger.info(f"{req.reqid} ignored")
             elif good:
                 self._set_review(req, 'accepted')
             elif self.review_mode != ReviewChoices.ACCEPT_ONPASS:
@@ -256,7 +256,7 @@ class ReviewBot(object):
             return None

         for args, who in self.request_commands('override'):
-            message = 'overridden by {}'.format(who)
+            message = f'overridden by {who}'
             override = args[1] if len(args) >= 2 else 'accept'
             if override == 'accept':
                 self.review_messages['accepted'] = message
@@ -285,7 +285,7 @@ class ReviewBot(object):
     def _set_review(self, req, state):
         doit = self.can_accept_review(req.reqid)
         if doit is None:
-            self.logger.info("can't change state, %s does not have the reviewer" % (req.reqid))
+            self.logger.info(f"can't change state, {req.reqid} does not have the reviewer")

         newstate = state

@@ -293,11 +293,11 @@ class ReviewBot(object):
             by_group = self.fallback_group

         msg = self.review_messages[state] if state in self.review_messages else state
-        self.logger.info("%s %s: %s" % (req.reqid, state, msg))
+        self.logger.info(f"{req.reqid} {state}: {msg}")

         if state == 'declined':
             if self.review_mode == ReviewChoices.FALLBACK_ONFAIL:
-                self.logger.info("%s needs fallback reviewer" % req.reqid)
+                self.logger.info(f"{req.reqid} needs fallback reviewer")
                 self.add_review(req, by_group=by_group, by_user=by_user,
                                 msg="Automated review failed. Needs fallback reviewer.")
                 newstate = 'accepted'
@@ -306,9 +306,9 @@ class ReviewBot(object):

         if doit:
             if self.dryrun:
-                self.logger.info("(dryrun) would set %s to %s with message %s" % (req.reqid, state, msg))
+                self.logger.info(f"(dryrun) would set {req.reqid} to {state} with message {msg}")
             else:
-                self.logger.debug("setting %s to %s" % (req.reqid, state))
+                self.logger.debug(f"setting {req.reqid} to {state}")
                 try:
                     osc.core.change_review_state(apiurl=self.apiurl,
                                                  reqid=req.reqid, newstate=newstate,
@@ -319,7 +319,7 @@ class ReviewBot(object):
                         raise e
                     self.logger.info('unable to change review state (likely superseded or revoked)')
         else:
-            self.logger.debug("%s review not changed" % (req.reqid))
+            self.logger.debug(f"{req.reqid} review not changed")

     def _is_duplicate_review(self, review, query, allow_duplicate):
         if review.by_group != query.get('by_group'):
@@ -363,7 +363,7 @@ class ReviewBot(object):

         u = osc.core.makeurl(self.apiurl, ['request', req.reqid], query)
         if self.dryrun:
-            self.logger.info('POST %s' % u)
+            self.logger.info(f'POST {u}')
             return

         if self.multiple_actions:
@@ -376,17 +376,17 @@ class ReviewBot(object):
             if e.code != 403:
                 raise e
             del query['cmd']
-            self.logger.info('unable to add review {} with message: {}'.format(query, msg))
+            self.logger.info(f'unable to add review {query} with message: {msg}')
             return

         code = ET.parse(r).getroot().attrib['code']
         if code != 'ok':
-            raise Exception('non-ok return code: {}'.format(code))
+            raise Exception(f'non-ok return code: {code}')

     def devel_project_review_add(self, request, project, package, message='adding devel project review'):
         devel_project, devel_package = devel_project_fallback(self.apiurl, project, package)
         if not devel_project:
-            self.logger.warning('no devel project found for {}/{}'.format(project, package))
+            self.logger.warning(f'no devel project found for {project}/{package}')
             return False

         self.add_review(request, by_project=devel_project, by_package=devel_package, msg=message)
@@ -541,7 +541,7 @@ class ReviewBot(object):
             # to find the real package name
             (linkprj, linkpkg) = self._get_linktarget(a.src_project, pkgname)
             if linkpkg is None or linkprj is None or linkprj != a.tgt_project:
-                self.logger.warning("%s/%s is not a link to %s" % (a.src_project, pkgname, a.tgt_project))
+                self.logger.warning(f"{a.src_project}/{pkgname} is not a link to {a.tgt_project}")
                 return self.check_source_submission(a.src_project, a.src_package, a.src_rev, a.tgt_project, a.tgt_package)
             else:
                 pkgname = linkpkg
@@ -555,14 +555,14 @@ class ReviewBot(object):
         # comment_write() is called by another bot wrapping __default().
         self.comment_handler_remove()

-        message = 'unhandled request type {}'.format(a.type)
+        message = f'unhandled request type {a.type}'
         self.logger.info(message)
         self.review_messages['accepted'] += ': ' + message
         return self.request_default_return

     def check_source_submission(self, src_project: str, src_package: str, src_rev: str, target_project: str, target_package: str) -> None:
         """ default implemention does nothing """
-        self.logger.info("%s/%s@%s -> %s/%s" % (src_project, src_package, src_rev, target_project, target_package))
+        self.logger.info(f"{src_project}/{src_package}@{src_rev} -> {target_project}/{target_package}")
         return None

     @staticmethod
@@ -637,17 +637,17 @@ class ReviewBot(object):
             if self.review_group and self._has_open_review_by(root, 'by_group', self.review_group):
                 return True
         except HTTPError as e:
-            print('ERROR in URL %s [%s]' % (url, e))
+            print(f'ERROR in URL {url} [{e}]')
         return False

     def set_request_ids_search_review(self):
         review = None
         if self.review_user:
-            review = "@by_user='%s' and @state='new'" % self.review_user
+            review = f"@by_user='{self.review_user}' and @state='new'"
         if self.review_group:
-            review = osc.core.xpath_join(review, "@by_group='%s' and @state='new'" % self.review_group)
+            review = osc.core.xpath_join(review, f"@by_group='{self.review_group}' and @state='new'")
         url = osc.core.makeurl(self.apiurl, ('search', 'request'), {
-            'match': "state/@name='review' and review[%s]" % review, 'withfullhistory': 1})
+            'match': f"state/@name='review' and review[{review}]", 'withfullhistory': 1})
         root = ET.parse(osc.core.http_GET(url)).getroot()

         self.requests = []
@@ -659,7 +659,7 @@ class ReviewBot(object):

     # also used by openqabot
     def ids_project(self, project, typename):
-        xpath = "(state/@name='review' or state/@name='new') and (action/target/@project='%s' and action/@type='%s')" % (project, typename)
+        xpath = f"(state/@name='review' or state/@name='new') and (action/target/@project='{project}' and action/@type='{typename}')"
         url = osc.core.makeurl(self.apiurl, ('search', 'request'),
                                {'match': xpath,
                                 'withfullhistory': 1})
@@ -732,7 +732,7 @@ class ReviewBot(object):

         if message is None:
             if not len(self.comment_handler.lines):
-                self.logger.debug('skipping empty comment for {}'.format(debug_key))
+                self.logger.debug(f'skipping empty comment for {debug_key}')
                 return
             message = '\n\n'.join(self.comment_handler.lines)

@@ -756,21 +756,21 @@ class ReviewBot(object):

         if self._is_comment_identical(comment, message, identical):
             # Assume same state/result and number of lines in message is duplicate.
-            self.logger.debug('previous comment too similar on {}'.format(debug_key))
+            self.logger.debug(f'previous comment too similar on {debug_key}')
             return

         if comment is None:
-            self.logger.debug('broadening search to include any state on {}'.format(debug_key))
+            self.logger.debug(f'broadening search to include any state on {debug_key}')
             comment, _ = self.comment_api.comment_find(comments, bot_name)
         if comment is not None:
-            self.logger.debug('removing previous comment on {}'.format(debug_key))
+            self.logger.debug(f'removing previous comment on {debug_key}')
             if not self.dryrun:
                 self.comment_api.delete(comment['id'])
         elif only_replace:
-            self.logger.debug('no previous comment to replace on {}'.format(debug_key))
+            self.logger.debug(f'no previous comment to replace on {debug_key}')
             return

-        self.logger.debug('adding comment to {}: {}'.format(debug_key, message))
+        self.logger.debug(f'adding comment to {debug_key}: {message}')
         if not self.dryrun:
             self.comment_api.add_comment(comment=message, **kwargs)

@@ -787,7 +787,7 @@ class ReviewBot(object):

     def _check_matching_srcmd5(self, project, package, rev, history_limit=5):
         """check if factory sources contain the package and revision. check head and history"""
-        self.logger.debug("checking %s in %s" % (package, project))
+        self.logger.debug(f"checking {package} in {project}")
         try:
             osc.core.show_package_meta(self.apiurl, project, package)
         except (HTTPError, URLError):
@@ -816,9 +816,9 @@ class ReviewBot(object):
             node = revision.find('srcmd5')
             if node is None:
                 continue
-            self.logger.debug("checking %s" % node.text)
+            self.logger.debug(f"checking {node.text}")
             if node.text == rev:
-                self.logger.debug("got it, rev %s" % revision.get('rev'))
+                self.logger.debug(f"got it, rev {revision.get('rev')}")
                 return True
             if i == history_limit:
                 break
@@ -995,7 +995,7 @@ class CommandLineInterface(cmdln.Cmdln):
                 except ExTimeout:
                     pass
                 signal.alarm(0)
-                self.logger.info("recheck at %s" % datetime.datetime.now().isoformat())
+                self.logger.info(f"recheck at {datetime.datetime.now().isoformat()}")
             else:
                 self.logger.info("sleeping %d minutes." % interval)
                 time.sleep(interval * 60)
@@ -51,7 +51,7 @@ class ToolBase(object):
             return http_GET(url)
         except HTTPError as e:
             if 500 <= e.code <= 599:
-                print('Retrying {}'.format(url))
+                print(f'Retrying {url}')
                 time.sleep(1)
                 return self.retried_GET(url)
             logging.error('%s: %s', e, url)
@@ -60,7 +60,7 @@ class ToolBase(object):
             logging.error('%s: "%s - %s" %s', e, e.reason, type(e.reason), url)
             # connection timeout
             if isinstance(e.reason, TimeoutError):
-                print('Retrying {}'.format(url))
+                print(f'Retrying {url}')
                 time.sleep(1)
                 return self.retried_GET(url)
             raise e
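Note that calls such as logging.error('%s: %s', e, url) stay %-based on both sides of these hunks, presumably on purpose: when the format string and its arguments are passed to the logging call separately, interpolation is deferred until a handler actually emits the record, so rewriting them as f-strings would change behavior, not just style. A self-contained sketch of the distinction, with hypothetical values:

    import logging
    url = 'https://example.com/api'  # hypothetical
    e = ValueError('boom')           # hypothetical stand-in for the caught error
    logging.error('%s: %s', e, url)  # deferred: formatted only if emitted (kept)
    logging.error(f'{e}: {url}')     # eager: formatted unconditionally (avoided)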
@@ -202,7 +202,7 @@ class CommandLineInterface(cmdln.Cmdln):
                 except ExTimeout:
                     pass
                 signal.alarm(0)
-                logger.info("recheck at %s" % datetime.datetime.now().isoformat())
+                logger.info(f"recheck at {datetime.datetime.now().isoformat()}")
                 continue
             break

@@ -40,9 +40,9 @@ class BoilderPlate(cmdln.Cmdln):
         """

         for req in self.session.query(DB.Request).all():
-            print('%s %s'%(req.id, req.state))
+            print(f'{req.id} {req.state}')
             for a in req.abichecks:
-                print(' %s %s %s'%(a.dst_project, a.dst_package, a.result))
+                print(f' {a.dst_project} {a.dst_package} {a.result}')
                 for r in a.reports:
                     print(' %s %10s %-25s %s'%(r.id, r.arch, r.dst_lib, r.result))

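The unchanged print(' %s %10s %-25s %s' % ...) line above shows one boundary of "where possible" that the diff leaves alone, although f-strings can express the same alignment with format specs. A hypothetical equivalent, not part of the commit:

    from collections import namedtuple
    Report = namedtuple('Report', ('id', 'arch', 'dst_lib', 'result'))  # made-up row
    r = Report(1, 'x86_64', 'libfoo.so.1', 'ok')
    # %10s right-aligns like :>10, %-25s left-aligns like :<25
    assert ' %s %10s %-25s %s' % (r.id, r.arch, r.dst_lib, r.result) == \
           f' {r.id} {r.arch:>10} {r.dst_lib:<25} {r.result}'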
@@ -98,7 +98,7 @@ class BoilderPlate(cmdln.Cmdln):

         request = self.session.query(DB.Request).filter(DB.Request.id == request_id).one()
         logentry = DB.Log(request_id = request_id,
-                          line = 'manually setting state to seen. previous state: %s (%s)'%(request.state, request.result))
+                          line = f'manually setting state to seen. previous state: {request.state} ({request.result})')
         request.state = 'seen'
         request.result = None
         self.session.add(logentry)
@@ -78,7 +78,7 @@ LibResult = namedtuple('LibResult', ('src_repo', 'src_lib', 'dst_repo', 'dst_lib
 class DistUrlMismatch(Exception):
     def __init__(self, disturl, md5):
         Exception.__init__(self)
-        self.msg = 'disturl mismatch has: %s wanted ...%s'%(disturl, md5)
+        self.msg = f'disturl mismatch has: {disturl} wanted ...{md5}'

     def __str__(self):
         return self.msg
@@ -87,7 +87,7 @@ class DistUrlMismatch(Exception):
 class SourceBroken(Exception):
     def __init__(self, project, package):
         Exception.__init__(self)
-        self.msg = '%s/%s has broken sources, needs rebase'%(project, package)
+        self.msg = f'{project}/{package} has broken sources, needs rebase'

     def __str__(self):
         return self.msg
@@ -96,7 +96,7 @@ class SourceBroken(Exception):
 class NoBuildSuccess(Exception):
     def __init__(self, project, package, md5):
         Exception.__init__(self)
-        self.msg = '%s/%s(%s) had no successful build'%(project, package, md5)
+        self.msg = f'{project}/{package}({md5}) had no successful build'

     def __str__(self):
         return self.msg
@@ -105,7 +105,7 @@ class NoBuildSuccess(Exception):
 class NotReadyYet(Exception):
     def __init__(self, project, package, reason):
         Exception.__init__(self)
-        self.msg = '%s/%s not ready yet: %s'%(project, package, reason)
+        self.msg = f'{project}/{package} not ready yet: {reason}'

     def __str__(self):
         return self.msg
@@ -218,14 +218,14 @@ class ABIChecker(ReviewBot.ReviewBot):
         dst_srcinfo = self.get_sourceinfo(dst_project, dst_package)
         self.logger.debug('dest sourceinfo %s', pformat(dst_srcinfo))
         if dst_srcinfo is None:
-            msg = "%s/%s seems to be a new package, no need to review"%(dst_project, dst_package)
+            msg = f"{dst_project}/{dst_package} seems to be a new package, no need to review"
             self.logger.info(msg)
             self.reports.append(report)
             return True
         src_srcinfo = self.get_sourceinfo(src_project, src_package, src_rev)
         self.logger.debug('src sourceinfo %s', pformat(src_srcinfo))
         if src_srcinfo is None:
-            msg = "%s/%s@%s does not exist!? can't check"%(src_project, src_package, src_rev)
+            msg = f"{src_project}/{src_package}@{src_rev} does not exist!? can't check"
             self.logger.error(msg)
             self.text_summary += msg + "\n"
             self.reports.append(report)
@@ -239,7 +239,7 @@ class ABIChecker(ReviewBot.ReviewBot):
             myrepos = self.findrepos(src_project, src_srcinfo, dst_project, dst_srcinfo)
         except NoBuildSuccess as e:
             self.logger.info(e)
-            self.text_summary += "**Error**: %s\n"%e
+            self.text_summary += f"**Error**: {e}\n"
             self.reports.append(report)
             return False
         except NotReadyYet as e:
@@ -248,12 +248,12 @@ class ABIChecker(ReviewBot.ReviewBot):
             return None
         except SourceBroken as e:
             self.logger.error(e)
-            self.text_summary += "**Error**: %s\n"%e
+            self.text_summary += f"**Error**: {e}\n"
             self.reports.append(report)
             return False

         if not myrepos:
-            self.text_summary += "**Error**: %s does not build against %s, can't check library ABIs\n\n"%(src_project, dst_project)
+            self.text_summary += f"**Error**: {src_project} does not build against {dst_project}, can't check library ABIs\n\n"
             self.logger.info("no matching repos, can't compare")
             self.reports.append(report)
             return False
@@ -272,13 +272,13 @@ class ABIChecker(ReviewBot.ReviewBot):
                 if new_repo_map is not None:
                     myrepos = new_repo_map
             except MaintenanceError as e:
-                self.text_summary += "**Error**: %s\n\n"%e
+                self.text_summary += f"**Error**: {e}\n\n"
                 self.logger.error('%s', e)
                 self.reports.append(report)
                 return False
             except NoBuildSuccess as e:
                 self.logger.info(e)
-                self.text_summary += "**Error**: %s\n"%e
+                self.text_summary += f"**Error**: {e}\n"
                 self.reports.append(report)
                 return False
             except NotReadyYet as e:
@@ -287,7 +287,7 @@ class ABIChecker(ReviewBot.ReviewBot):
                 return None
             except SourceBroken as e:
                 self.logger.error(e)
-                self.text_summary += "**Error**: %s\n"%e
+                self.text_summary += f"**Error**: {e}\n"
                 self.reports.append(report)
                 return False

@@ -305,7 +305,7 @@ class ABIChecker(ReviewBot.ReviewBot):
                 if dst_libs is None:
                     continue
             except DistUrlMismatch as e:
-                self.logger.error("%s/%s %s/%s: %s"%(dst_project, dst_package, mr.dstrepo, mr.arch, e))
+                self.logger.error(f"{dst_project}/{dst_package} {mr.dstrepo}/{mr.arch}: {e}")
                 if ret == True: # need to check again
                     ret = None
                 continue
@@ -326,7 +326,7 @@ class ABIChecker(ReviewBot.ReviewBot):
                 self.text_summary += "*Warning*: the submission does not contain any libs anymore\n\n"
                 continue
             except DistUrlMismatch as e:
-                self.logger.error("%s/%s %s/%s: %s"%(src_project, src_package, mr.srcrepo, mr.arch, e))
+                self.logger.error(f"{src_project}/{src_package} {mr.srcrepo}/{mr.arch}: {e}")
                 if ret == True: # need to check again
                     ret = None
                 continue
@@ -362,7 +362,7 @@ class ABIChecker(ReviewBot.ReviewBot):
                     pairs.add((lib, l))
                     found = True
             if found == False:
-                self.text_summary += "*Warning*: %s no longer packaged\n\n"%lib
+                self.text_summary += f"*Warning*: {lib} no longer packaged\n\n"

         self.logger.debug("to diff: %s", pformat(pairs))

@@ -385,7 +385,7 @@ class ABIChecker(ReviewBot.ReviewBot):

         # we just need that to pass a name to abi checker
         m = so_re.match(old)
-        htmlreport = 'report-%s-%s-%s-%s-%s-%08x.html'%(mr.srcrepo, os.path.basename(old), mr.dstrepo, os.path.basename(new), mr.arch, int(time.time()))
+        htmlreport = f'report-{mr.srcrepo}-{os.path.basename(old)}-{mr.dstrepo}-{os.path.basename(new)}-{mr.arch}-{int(time.time()):08x}.html'

         # run abichecker
         if m \
@@ -401,8 +401,8 @@ class ABIChecker(ReviewBot.ReviewBot):
             elif overall == True and r == False:
                 overall = r
         else:
-            self.logger.error('failed to compare %s <> %s'%(old,new))
-            self.text_summary += "**Error**: ABI check failed on %s vs %s\n\n"%(old, new)
+            self.logger.error(f'failed to compare {old} <> {new}')
+            self.text_summary += f"**Error**: ABI check failed on {old} vs {new}\n\n"
             if ret == True: # need to check again
                 ret = None

@@ -429,7 +429,7 @@ class ABIChecker(ReviewBot.ReviewBot):

         # find the maintenance project
         url = osc.core.makeurl(self.apiurl, ('search', 'project', 'id'),
-                               "match=(maintenance/maintains/@project='%s'+and+attribute/@name='%s')"%(dst_project, osc.conf.config['maintenance_attribute']))
+                               f"match=(maintenance/maintains/@project='{dst_project}'+and+attribute/@name='{osc.conf.config['maintenance_attribute']}')")
         root = ET.parse(osc.core.http_GET(url)).getroot()
         if root is not None:
             node = root.find('project')
@@ -448,7 +448,7 @@ class ABIChecker(ReviewBot.ReviewBot):
                     if node.get('code') != 'disabled':
                         alldisabled = False
                 if alldisabled:
-                    self.logger.debug("all repos disabled, using originproject %s"%originproject)
+                    self.logger.debug(f"all repos disabled, using originproject {originproject}")
                 else:
                     originproject = None
             else:
@@ -456,20 +456,20 @@ class ABIChecker(ReviewBot.ReviewBot):
             # packages are only a link to packagename.incidentnr
             (linkprj, linkpkg) = self._get_linktarget(dst_project, pkg)
             if linkpkg is not None and linkprj == dst_project:
-                self.logger.debug("%s/%s links to %s"%(dst_project, pkg, linkpkg))
+                self.logger.debug(f"{dst_project}/{pkg} links to {linkpkg}")
                 regex = re.compile(r'.*\.(\d+)$')
                 m = regex.match(linkpkg)
                 if m is None:
-                    raise MaintenanceError("%s/%s -> %s/%s is not a proper maintenance link (must match /%s/)"%(dst_project, pkg, linkprj, linkpkg, regex.pattern))
+                    raise MaintenanceError(f"{dst_project}/{pkg} -> {linkprj}/{linkpkg} is not a proper maintenance link (must match /{regex.pattern}/)")
                 incident = m.group(1)
-                self.logger.debug("is maintenance incident %s"%incident)
+                self.logger.debug(f"is maintenance incident {incident}")

-                originproject = "%s:%s"%(mproject, incident)
+                originproject = f"{mproject}:{incident}"
                 originpackage = pkg+'.'+dst_project.replace(':', '_')

                 origin_srcinfo = self.get_sourceinfo(originproject, originpackage)
                 if origin_srcinfo is None:
-                    raise MaintenanceError("%s/%s invalid"%(originproject, originpackage))
+                    raise MaintenanceError(f"{originproject}/{originpackage} invalid")

                 # find the map of maintenance incident repos to destination repos
                 originrepos = self.findrepos(originproject, origin_srcinfo, dst_project, dst_srcinfo)
@@ -486,7 +486,7 @@ class ABIChecker(ReviewBot.ReviewBot):
                     # sometimes a previously released maintenance
                     # update didn't cover all architectures. We can
                     # only ignore that then.
-                    self.logger.warning("couldn't find repo %s/%s in %s/%s"%(mr.dstrepo, mr.arch, originproject, originpackage))
+                    self.logger.warning(f"couldn't find repo {mr.dstrepo}/{mr.arch} in {originproject}/{originpackage}")
                     continue
                 matchrepos.add(MR(mr.srcrepo, mapped[(mr.dstrepo, mr.arch)].srcrepo, mr.arch))

@@ -553,7 +553,7 @@ class ABIChecker(ReviewBot.ReviewBot):
         if ret is not None and self.text_summary == '':
             # if for some reason save_reports_to_db didn't produce a
             # summary we add one
-            self.text_summary = "ABI checker result: [%s](%s/request/%s)"%(result, WEB_URL, req.reqid)
+            self.text_summary = f"ABI checker result: [{result}]({WEB_URL}/request/{req.reqid})"

         if commentid and not self.dryrun:
             self.commentapi.delete(commentid)
@@ -613,10 +613,10 @@ class ABIChecker(ReviewBot.ReviewBot):
                 continue
             elif r.result:
                 self.text_summary += "Good news from ABI check, "
-                self.text_summary += "%s seems to be ABI [compatible](%s/request/%s):\n\n"%(r.dst_package, WEB_URL, req.reqid)
+                self.text_summary += f"{r.dst_package} seems to be ABI [compatible]({WEB_URL}/request/{req.reqid}):\n\n"
             else:
                 self.text_summary += "Warning: bad news from ABI check, "
-                self.text_summary += "%s may be ABI [**INCOMPATIBLE**](%s/request/%s):\n\n"%(r.dst_package, WEB_URL, req.reqid)
+                self.text_summary += f"{r.dst_package} may be ABI [**INCOMPATIBLE**]({WEB_URL}/request/{req.reqid}):\n\n"
             for lr in r.reports:
                 libreport = DB.LibReport(
                     abicheck = abicheck,
@@ -643,7 +643,7 @@ class ABIChecker(ReviewBot.ReviewBot):
         msg = "<!-- abichecker state=%s%s -->\n"%(state, ' result=%s'%result if result else '')
         msg += self.text_summary

-        self.logger.info("add comment: %s"%msg)
+        self.logger.info(f"add comment: {msg}")
         if not self.dryrun:
             #self.commentapi.delete_from_where_user(self.review_user, request_id = req.reqid)
             self.commentapi.add_comment(request_id = req.reqid, comment = msg)
@@ -672,7 +672,7 @@ class ABIChecker(ReviewBot.ReviewBot):
         self.logger.debug(cmd)
         r = subprocess.Popen(cmd, close_fds=True, cwd=CACHEDIR).wait()
         if r != 0:
-            self.logger.error("failed to dump %s!"%filename)
+            self.logger.error(f"failed to dump {filename}!")
             # XXX: record error
             return False
         return True
@@ -683,7 +683,7 @@ class ABIChecker(ReviewBot.ReviewBot):
         fetchlist, liblist, debuglist = self.compute_fetchlist(project, package, srcinfo, repo, arch)

         if not fetchlist:
-            msg = "no libraries found in %s/%s %s/%s"%(project, package, repo, arch)
+            msg = f"no libraries found in {project}/{package} {repo}/{arch}"
             self.logger.info(msg)
             return None, None

@@ -703,14 +703,14 @@ class ABIChecker(ReviewBot.ReviewBot):
         # extract binary rpms
         tmpfile = os.path.join(CACHEDIR, "cpio")
         for fn in fetchlist:
-            self.logger.debug("extract %s"%fn)
+            self.logger.debug(f"extract {fn}")
             with open(tmpfile, 'wb') as tmpfd:
                 if fn not in downloaded:
-                    raise FetchError("%s was not downloaded!"%fn)
+                    raise FetchError(f"{fn} was not downloaded!")
                 self.logger.debug(downloaded[fn])
                 r = subprocess.call(['rpm2cpio', downloaded[fn]], stdout=tmpfd, close_fds=True)
                 if r != 0:
-                    raise FetchError("failed to extract %s!"%fn)
+                    raise FetchError(f"failed to extract {fn}!")
                 tmpfd.close()
             os.unlink(downloaded[fn])
             cpio = CpioRead(tmpfile)
@@ -745,7 +745,7 @@ class ABIChecker(ReviewBot.ReviewBot):
         downloaded = dict()
         for fn in filenames:
             if fn not in mtimes:
-                raise FetchError("missing mtime information for %s, can't check"% fn)
+                raise FetchError(f"missing mtime information for {fn}, can't check")
             repodir = os.path.join(DOWNLOADS, package, project, repo)
             if not os.path.exists(repodir):
                 os.makedirs(repodir)
@@ -781,7 +781,7 @@ class ABIChecker(ReviewBot.ReviewBot):
         try:
             r = osc.core.http_GET(u)
         except HTTPError as e:
-            raise FetchError('failed to fetch header information: %s'%e)
+            raise FetchError(f'failed to fetch header information: {e}')
         tmpfile = NamedTemporaryFile(prefix="cpio-", delete=False)
         for chunk in r:
             tmpfile.write(chunk)
@@ -799,7 +799,7 @@ class ABIChecker(ReviewBot.ReviewBot):
             fh.seek(ch.dataoff, os.SEEK_SET)
             h = self.readRpmHeaderFD(fh)
             if h is None:
-                raise FetchError("failed to read rpm header for %s"%ch.filename)
+                raise FetchError(f"failed to read rpm header for {ch.filename}")
             m = rpm_re.match(ch.filename.decode('utf-8'))
             if m:
                 yield m.group(1), h
@@ -827,7 +827,7 @@ class ABIChecker(ReviewBot.ReviewBot):
             return ET.parse(osc.core.http_GET(url)).getroot()
         except HTTPError as e:
             if e.code != 404:
-                self.logger.error('ERROR in URL %s [%s]' % (url, e))
+                self.logger.error(f'ERROR in URL {url} [{e}]')
                 raise
             pass
         return None
@@ -890,16 +890,16 @@ class ABIChecker(ReviewBot.ReviewBot):

         for mr in matchrepos:
             if not (mr.srcrepo, mr.arch) in rmap:
-                self.logger.warning("%s/%s had no build success"%(mr.srcrepo, mr.arch))
+                self.logger.warning(f"{mr.srcrepo}/{mr.arch} had no build success")
                 raise NotReadyYet(src_project, src_srcinfo.package, "no result")
             if rmap[(mr.srcrepo, mr.arch)]['dirty']:
-                self.logger.warning("%s/%s dirty"%(mr.srcrepo, mr.arch))
+                self.logger.warning(f"{mr.srcrepo}/{mr.arch} dirty")
                 raise NotReadyYet(src_project, src_srcinfo.package, "dirty")
             code = rmap[(mr.srcrepo, mr.arch)]['code']
             if code == 'broken':
                 raise SourceBroken(src_project, src_srcinfo.package)
             if code != 'succeeded' and code != 'locked' and code != 'excluded':
-                self.logger.warning("%s/%s not succeeded (%s)"%(mr.srcrepo, mr.arch, code))
+                self.logger.warning(f"{mr.srcrepo}/{mr.arch} not succeeded ({code})")
                 raise NotReadyYet(src_project, src_srcinfo.package, code)

     def findrepos(self, src_project, src_srcinfo, dst_project, dst_srcinfo):
@@ -928,7 +928,7 @@ class ABIChecker(ReviewBot.ReviewBot):
             name = repo.attrib['name']
             path = repo.findall('path')
             if path is None or len(path) != 1:
-                self.logger.error("repo %s has more than one path"%name)
+                self.logger.error(f"repo {name} has more than one path")
                 continue
             prj = path[0].attrib['project']
             if prj == 'openSUSE:Tumbleweed':
@@ -959,7 +959,7 @@ class ABIChecker(ReviewBot.ReviewBot):
             raise NoBuildSuccess(src_project, src_srcinfo.package, src_srcinfo.verifymd5)
         for mr in matchrepos:
             if not (mr.srcrepo, arch) in srcrepos:
-                self.logger.error("%s/%s had no build success"%(mr.srcrepo, arch))
+                self.logger.error(f"{mr.srcrepo}/{arch} had no build success")
                 raise NoBuildSuccess(src_project, src_srcinfo.package, src_srcinfo.verifymd5)

         return matchrepos
@@ -990,7 +990,7 @@ class ABIChecker(ReviewBot.ReviewBot):
         """ scan binary rpms of the specified repo for libraries.
         Returns a set of packages to fetch and the libraries found
         """
-        self.logger.debug('scanning %s/%s %s/%s'%(prj, pkg, repo, arch))
+        self.logger.debug(f'scanning {prj}/{pkg} {repo}/{arch}')

         headers = self._fetchcpioheaders(prj, pkg, repo, arch)
         missing_debuginfo = set()
@@ -1017,12 +1017,12 @@ class ABIChecker(ReviewBot.ReviewBot):
                 lnk = lnk.decode('utf-8')
             if so_re.match(fn):
                 if S_ISREG(mode):
-                    self.logger.debug('found lib: %s'%fn)
+                    self.logger.debug(f'found lib: {fn}')
                     lib_packages.setdefault(pkgname, set()).add(fn)
                 elif S_ISLNK(mode) and lnk is not None:
                     alias = os.path.basename(fn)
                     libname = os.path.basename(lnk)
-                    self.logger.debug('found alias: %s -> %s'%(alias, libname))
+                    self.logger.debug(f'found alias: {alias} -> {libname}')
                     lib_aliases.setdefault(libname, set()).add(alias)

         fetchlist = set()
@@ -1040,7 +1040,7 @@ class ABIChecker(ReviewBot.ReviewBot):
             files = set ([f.decode('utf-8') for f in h['filenames']])
             ok = True
             for lib in lib_packages[pkgname]:
-                libdebug = '/usr/lib/debug%s.debug'%lib
+                libdebug = f'/usr/lib/debug{lib}.debug'
                 if libdebug not in files:
                     # some new format that includes version, release and arch in debuginfo?
                     # FIXME: version and release are actually the
@@ -1067,7 +1067,7 @@ class ABIChecker(ReviewBot.ReviewBot):
                     liblist[lib] |= lib_aliases[libname]

         if missing_debuginfo:
-            self.logger.error('missing debuginfo: %s'%pformat(missing_debuginfo))
+            self.logger.error(f'missing debuginfo: {pformat(missing_debuginfo)}')
             raise MissingDebugInfo(missing_debuginfo)

         return fetchlist, liblist, debuglist
@@ -74,7 +74,7 @@ class Config(Base):
     t_updated = Column(DateTime, default=datetime.now, onupdate=datetime.now)

 def db_engine():
-    return create_engine('sqlite:///%s/abi-checker.db'%DATADIR)
+    return create_engine(f'sqlite:///{DATADIR}/abi-checker.db')

 def db_session():
     engine = db_engine()
@@ -108,14 +108,14 @@ class BiArchTool(ToolBase.ToolBase):
         if ':Rings' in self.project:
             self.biarch_packages = set()
         else:
-            self.biarch_packages = set(self.meta_get_packagelist("%s:Rings:0-Bootstrap" % self.project))
-            self.biarch_packages |= set(self.meta_get_packagelist("%s:Rings:1-MinimalX" % self.project))
+            self.biarch_packages = set(self.meta_get_packagelist(f"{self.project}:Rings:0-Bootstrap"))
+            self.biarch_packages |= set(self.meta_get_packagelist(f"{self.project}:Rings:1-MinimalX"))

         self._init_rdeps()
         self.fill_package_meta()

     def fill_package_meta(self):
-        url = self.makeurl(['search', 'package'], "match=[@project='%s']" % self.project)
+        url = self.makeurl(['search', 'package'], f"match=[@project='{self.project}']")
         root = ET.fromstring(self.cached_GET(url))
         for p in root.findall('package'):
             name = p.attrib['name']
@@ -153,7 +153,7 @@ class BiArchTool(ToolBase.ToolBase):

         packages = set()

-        for n in result.findall("./result[@arch='{}']/status".format(self.arch)):
+        for n in result.findall(f"./result[@arch='{self.arch}']/status"):
             if n.get('code') not in ('disabled', 'excluded'):
                 packages.add(n.get('package'))

@@ -167,7 +167,7 @@ class BiArchTool(ToolBase.ToolBase):
             pkgmeta = self.package_metas[pkg]

             for build in pkgmeta.findall("./build"):
-                for n in build.findall("./enable[@arch='{}']".format(self.arch)):
+                for n in build.findall(f"./enable[@arch='{self.arch}']"):
                     logger.debug("disable %s", pkg)
                     build.remove(n)
                     changed = True
@@ -233,9 +233,9 @@ class BiArchTool(ToolBase.ToolBase):
             must_disable = None
             changed = None

-            for n in pkgmeta.findall("./build/enable[@arch='{}']".format(self.arch)):
+            for n in pkgmeta.findall(f"./build/enable[@arch='{self.arch}']"):
                 is_enabled = True
-            for n in pkgmeta.findall("./build/disable[@arch='{}']".format(self.arch)):
+            for n in pkgmeta.findall(f"./build/disable[@arch='{self.arch}']"):
                 is_disabled = True

             if force:
@@ -251,7 +251,7 @@ class BiArchTool(ToolBase.ToolBase):
                 if is_disabled:
                     logger.info('enabling %s for %s', pkg, self.arch)
                     for build in pkgmeta.findall("./build"):
-                        for n in build.findall("./disable[@arch='{}']".format(self.arch)):
+                        for n in build.findall(f"./disable[@arch='{self.arch}']"):
                             build.remove(n)
                             changed = True
                     if not changed:
@@ -272,7 +272,7 @@ class BiArchTool(ToolBase.ToolBase):
                 if is_enabled:
                     logger.info('removing explicit enable %s for %s', pkg, self.arch)
                     for build in pkgmeta.findall("./build"):
-                        for n in build.findall("./enable[@arch='{}']".format(self.arch)):
+                        for n in build.findall(f"./enable[@arch='{self.arch}']"):
                             build.remove(n)
                             changed = True
                     if not changed:
@@ -291,7 +291,7 @@ class BiArchTool(ToolBase.ToolBase):
                 if self.caching:
                     self._invalidate__cached_GET(pkgmetaurl)

-            if wipebinaries and pkgmeta.find("./build/disable[@arch='{}']".format(self.arch)) is not None:
+            if wipebinaries and pkgmeta.find(f"./build/disable[@arch='{self.arch}']") is not None:
                 logger.debug("wiping %s", pkg)
                 self.http_POST(self.makeurl(['build', self.project], {
                     'cmd': 'wipe',
@@ -309,7 +309,7 @@ class CommandLineInterface(ToolBase.CommandLineInterface):
     def get_optparser(self):
         parser = ToolBase.CommandLineInterface.get_optparser(self)
         parser.add_option('-p', '--project', dest='project', metavar='PROJECT',
-                          help='project to process (default: %s)' % FACTORY,
+                          help=f'project to process (default: {FACTORY})',
                           default=FACTORY)
         return parser

bugowner.py | 12

@@ -55,7 +55,7 @@ class BugownerTool(ToolBase.ToolBase):
         url = self.makeurl(['person', name])
         root = ET.fromstring(self.cached_GET(url))

-        person = Person(*[root.find('./{}'.format(field)).text for field in Person._fields])
+        person = Person(*[root.find(f'./{field}').text for field in Person._fields])
         self.persons[name] = person

         return person
@@ -76,9 +76,9 @@ class BugownerTool(ToolBase.ToolBase):
         url = self.makeurl(['search', 'owner'], {'binary': package})
         root = ET.fromstring(self.cached_GET(url))
         ret = []
-        for node in root.findall('./owner/person[@role="{}"]'.format(role)):
+        for node in root.findall(f'./owner/person[@role="{role}"]'):
             ret.append(Owner('person', node.get('name')))
-        for node in root.findall('./owner/group[@role="{}"]'.format(role)):
+        for node in root.findall(f'./owner/group[@role="{role}"]'):
             ret.append(Owner('group', node.get('name')))

         return ret
@@ -88,7 +88,7 @@ class BugownerTool(ToolBase.ToolBase):
         root = ET.fromstring(self.cached_GET(url))
         idname = 'userid' if owner.kind == 'person' else 'groupid'
         # XXX: can't use 'and' here to filter for bugowner too
-        exists = root.findall('./{}[@{}="{}"]'.format(owner.kind, idname, owner.name))
+        exists = root.findall(f'./{owner.kind}[@{idname}="{owner.name}"]')
         for node in exists:
             if node.get('role') == 'bugowner':
                 logger.debug("%s/%s already has %s %s", self.project, package, owner.kind, owner.name)
@@ -113,7 +113,7 @@ class BugownerTool(ToolBase.ToolBase):
         user = srcrev['user']

         if self.is_release_manager(user):
-            logging.debug("%s was last touched by %s, ignored." % (package, user))
+            logging.debug(f"{package} was last touched by {user}, ignored.")
             return None

         return [Owner('person', user)]
@@ -138,7 +138,7 @@ class CommandLineInterface(ToolBase.CommandLineInterface):
     def get_optparser(self):
         parser = ToolBase.CommandLineInterface.get_optparser(self)
         parser.add_option('-p', '--project', dest='project', metavar='PROJECT',
-                          help='project to process (default: %s)' % FACTORY,
+                          help=f'project to process (default: {FACTORY})',
                           default=FACTORY)
         parser.add_option('--reference-project', metavar='PROJECT',
                           action='append', help='reference project')
@ -31,7 +31,7 @@ class RemindedPackage(object):
|
|||||||
self.problem = problem
|
self.problem = problem
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return '{} {} {} {}'.format(self.firstfail, self.reminded, self.remindCount, self.problem)
|
return f'{self.firstfail} {self.reminded} {self.remindCount} {self.problem}'
|
||||||
|
|
||||||
|
|
||||||
def jdefault(o):
|
def jdefault(o):
|
||||||
@@ -88,14 +88,14 @@ Kind regards,

 def SendMail(logger, project, sender, to, fullname, subject, text):
     try:
-        xmailer = '{} - Problem Notification'.format(project)
+        xmailer = f'{project} - Problem Notification'
         to = email.utils.formataddr((fullname, to))
         mail_send_with_details(sender=sender, to=to,
                                subject=subject, text=text, xmailer=xmailer,
                                relay=args.relay, dry=args.dry)
     except Exception as e:
         print(e)
-        logger.error("Failed to send an email to %s (%s)" % (fullname, to))
+        logger.error(f"Failed to send an email to {fullname} ({to})")


 def check_reminder(pname, first, problem, now, Reminded, RemindedLoaded):
@@ -144,7 +144,7 @@ def main(args):
     global project
     project = args.project

-    logger.debug('loading build fails for %s' % project)
+    logger.debug(f'loading build fails for {project}')
     url = osc.core.makeurl(apiurl, ['source', f'{project}:Staging', 'dashboard', f'rebuildpacs.{project}-standard.yaml'])
     try:
         _data = osc.core.http_GET(url)
@@ -162,7 +162,7 @@ def main(args):

     reminded_json = args.json
     if not reminded_json:
-        reminded_json = '{}.reminded.json'.format(project)
+        reminded_json = f'{project}.reminded.json'

     try:
         with open(reminded_json) as json_data:
@@ -225,7 +225,7 @@ def main(args):
     for userid in maintainers:
         to = Person[userid][2]
         fullname = Person[userid][1]
-        subject = '%s - %s - Build problem notification' % (project, package)
+        subject = f'{project} - {package} - Build problem notification'
         text = MAIL_TEMPLATES[Reminded[package].remindCount - 1] % {
             'recipient': fullname,
             'sender': sender,
@@ -250,11 +250,11 @@ def main(args):
     ProjectComplainList.sort()
     to = 'factory@lists.opensuse.org'
     fullname = "openSUSE Factory - Mailing List"
-    subject = "%(project)s - Build fail notification" % {'project': project}
+    subject = f"{project} - Build fail notification"

-    text = u"""Dear Package maintainers and hackers.
+    text = f"""Dear Package maintainers and hackers.

-Below package(s) in %(project)s have had problems for at
+Below package(s) in {project} have had problems for at
 least 4 weeks. We tried to send out notifications to the
 configured bugowner/maintainers of the package(s), but so far no
 fix has been submitted. This probably means that the
@@ -262,16 +262,16 @@ maintainer/bugowner did not yet find the time to look into the
 matter and he/she would certainly appreciate help to get this
 sorted.

-""" % {'project': project}
+"""
     for pkg in ProjectComplainList:
-        text += "- %s: %s\n" % (pkg, Reminded[pkg].problem)
-    text += u"""
+        text += f"- {pkg}: {Reminded[pkg].problem}\n"
+    text += f"""
 Unless somebody is stepping up and submitting fixes, the listed
-package(s) are going to be removed from %(project)s.
+package(s) are going to be removed from {project}.

 Kind regards,
-%(sender)s
-""" % {'project': project, 'sender': sender}
+{sender}
+"""
     SendMail(logger, project, sender, to, fullname, subject, text)
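The template conversion above replaces `%(project)s`-style named placeholders, filled from a dict at the end of the literal, with names interpolated in place. A small self-contained sketch of the equivalence, with hypothetical values:

```python
project = 'openSUSE:Factory'  # hypothetical value
sender = 'factory-auto'       # hypothetical value

# Old style: named placeholders, resolved from a mapping at the end.
old = """Packages in %(project)s need attention.

Kind regards,
%(sender)s
""" % {'project': project, 'sender': sender}

# New style: the f-string interpolates the same names where they appear.
new = f"""Packages in {project} need attention.

Kind regards,
{sender}
"""

assert old == new
```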
@@ -38,7 +38,7 @@ class MaintenanceChecker(ReviewBot.ReviewBot):
             if prj.startswith('openSUSE:Leap') or prj.startswith('openSUSE:1'):
                 self.logger.debug("%s looks wrong as maintainer, skipped", prj)
                 continue
-            msg = 'Submission for {} by someone who is not maintainer in the devel project ({}). Please review'.format(pkg, prj)
+            msg = f'Submission for {pkg} by someone who is not maintainer in the devel project ({prj}). Please review'
             self.add_review(req, by_project=prj, by_package=pkg, msg=msg)

     @staticmethod
@@ -75,16 +75,16 @@ class MaintenanceChecker(ReviewBot.ReviewBot):
         if project.startswith('openSUSE:Leap:') and hasattr(a, 'src_project'):
             mapping = MaintenanceChecker._get_lookup_yml(self.apiurl, project)
             if mapping is None:
-                self.logger.error("error loading mapping for {}".format(project))
+                self.logger.error(f"error loading mapping for {project}")
             elif pkgname not in mapping:
-                self.logger.debug("{} not tracked".format(pkgname))
+                self.logger.debug(f"{pkgname} not tracked")
             else:
                 origin = mapping[pkgname]
-                self.logger.debug("{} comes from {}, submitted from {}".format(pkgname, origin, a.src_project))
+                self.logger.debug(f"{pkgname} comes from {origin}, submitted from {a.src_project}")
                 if origin.startswith('SUSE:SLE-12') and a.src_project.startswith('SUSE:SLE-12') \
                    or origin.startswith('SUSE:SLE-15') and a.src_project.startswith('SUSE:SLE-15') \
                    or origin.startswith('openSUSE:Leap') and a.src_project.startswith('openSUSE:Leap'):
-                    self.logger.info("{} submitted from {}, no maintainer review needed".format(pkgname, a.src_project))
+                    self.logger.info(f"{pkgname} submitted from {a.src_project}, no maintainer review needed")
                     return

         maintainers = set(maintainers_get(self.apiurl, project, pkgname))
@@ -92,18 +92,18 @@ class MaintenanceChecker(ReviewBot.ReviewBot):
         known_maintainer = False
         for m in maintainers:
             if author == m:
-                self.logger.debug("%s is maintainer" % author)
+                self.logger.debug(f"{author} is maintainer")
                 known_maintainer = True
         if not known_maintainer:
             for r in req.reviews:
                 if r.by_user in maintainers:
-                    self.logger.debug("found %s as reviewer" % r.by_user)
+                    self.logger.debug(f"found {r.by_user} as reviewer")
                     known_maintainer = True
         if not known_maintainer:
-            self.logger.debug("author: %s, maintainers: %s => need review" % (author, ','.join(maintainers)))
+            self.logger.debug(f"author: {author}, maintainers: {','.join(maintainers)} => need review")
             self.needs_maintainer_review.add(pkgname)
         else:
-            self.logger.warning("%s doesn't have maintainers" % pkgname)
+            self.logger.warning(f"{pkgname} doesn't have maintainers")
             self.needs_maintainer_review.add(pkgname)

     def check_action_maintenance_incident(self, req, a):
@@ -144,7 +144,7 @@ class CheckSource(ReviewBot.ReviewBot):

             return True
         elif (kind is not None and kind != 'source'):
-            self.review_messages['declined'] = 'May not modify a non-source package of type {}'.format(kind)
+            self.review_messages['declined'] = f'May not modify a non-source package of type {kind}'
             return False

         if not self.allow_source_in_sle and self.sle_project_to_check:
@@ -219,7 +219,7 @@ class CheckSource(ReviewBot.ReviewBot):

             req = self.__ensure_add_role_request(source_project)
             if req:
-                declined_msg += ' Created the add_role request %s for addressing this problem.' % req
+                declined_msg += f' Created the add_role request {req} for addressing this problem.'

             self.review_messages['declined'] = declined_msg
             return False
@@ -229,9 +229,9 @@ class CheckSource(ReviewBot.ReviewBot):
             return False

         # Checkout and see if renaming package screws up version parsing.
-        copath = os.path.expanduser('~/co/%s' % self.request.reqid)
+        copath = os.path.expanduser(f'~/co/{self.request.reqid}')
         if os.path.exists(copath):
-            self.logger.warning('directory %s already exists' % copath)
+            self.logger.warning(f'directory {copath} already exists')
             shutil.rmtree(copath)
         os.makedirs(copath)
         os.chdir(copath)
@@ -243,7 +243,7 @@ class CheckSource(ReviewBot.ReviewBot):
             os.rename(target_package, '_old')
         except HTTPError as e:
             if e.code == 404:
-                self.logger.info('target package does not exist %s/%s' % (target_project, target_package))
+                self.logger.info(f'target package does not exist {target_project}/{target_package}')
             else:
                 raise e
@@ -304,21 +304,21 @@ class CheckSource(ReviewBot.ReviewBot):
         known_maintainer = False
         if maintainers:
             if submitter in maintainers:
-                self.logger.debug("%s is maintainer" % submitter)
+                self.logger.debug(f"{submitter} is maintainer")
                 known_maintainer = True
             if not known_maintainer:
                 for r in self.request.reviews:
                     if r.by_user in maintainers:
-                        self.logger.debug("found %s as reviewer" % r.by_user)
+                        self.logger.debug(f"found {r.by_user} as reviewer")
                         known_maintainer = True
             if not known_maintainer:
-                self.logger.warning("submitter: %s, maintainers: %s => need review" % (submitter, ','.join(maintainers)))
-                self.logger.debug("adding review to %s/%s" % (devel_project, devel_package))
+                self.logger.warning(f"submitter: {submitter}, maintainers: {','.join(maintainers)} => need review")
+                self.logger.debug(f"adding review to {devel_project}/{devel_package}")
                 msg = ('Submission for {} by someone who is not maintainer in '
                        'the devel project ({}). Please review').format(target_package, devel_project)
                 self.add_review(self.request, by_project=devel_project, by_package=devel_package, msg=msg)
         else:
-            self.logger.warning("%s doesn't have devel project" % target_package)
+            self.logger.warning(f"{target_package} doesn't have devel project")

         if self.only_changes():
             self.logger.debug('only .changes modifications')
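Note that the `msg = ('Submission for {} ...').format(...)` pair in this hunk stays on `.format()`: the template is built from two implicitly concatenated literals, and an f-string rewrite would need an `f` prefix on each fragment that holds a placeholder. A sketch with made-up values:

```python
target_package, devel_project = 'vim', 'editors'  # hypothetical values

# Kept as .format(): only the combined string carries the placeholders.
msg = ('Submission for {} by someone who is not maintainer in '
       'the devel project ({}). Please review').format(target_package, devel_project)

# The f-string equivalent marks every fragment that interpolates.
msg_f = (f'Submission for {target_package} by someone who is not maintainer in '
         f'the devel project ({devel_project}). Please review')

assert msg == msg_f
```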
@@ -338,7 +338,7 @@ class CheckSource(ReviewBot.ReviewBot):

         # Allow any projects already used as devel projects for other packages.
         search = {
-            'package': "@project='%s' and devel/@project='%s'" % (target_project, source_project),
+            'package': f"@project='{target_project}' and devel/@project='{source_project}'",
         }
         result = osc.core.search(self.apiurl, **search)
         return result['package'].attrib['matches'] != '0'
@@ -455,7 +455,7 @@ class CheckSource(ReviewBot.ReviewBot):
         source_project - source project name
         """
         self.logger.info(
-            'Checking required maintainer from the source project (%s)' % self.required_maintainer
+            f'Checking required maintainer from the source project ({self.required_maintainer})'
         )
         if not self.required_maintainer:
             return True
@@ -475,12 +475,12 @@ class CheckSource(ReviewBot.ReviewBot):
             if len(add_roles) > 0:
                 return add_roles[0].reqid
             else:
-                add_role_msg = 'Created automatically from request %s' % self.request.reqid
+                add_role_msg = f'Created automatically from request {self.request.reqid}'
                 return create_add_role_request(self.apiurl, source_project, self.required_maintainer,
                                                'maintainer', message=add_role_msg)
         except HTTPError as e:
             self.logger.error(
-                'Cannot create the corresponding add_role request for %s: %s' % (self.request.reqid, e)
+                f'Cannot create the corresponding add_role request for {self.request.reqid}: {e}'
             )

     def __is_required_maintainer(self, request):
@@ -516,7 +516,7 @@ class CheckSource(ReviewBot.ReviewBot):
         try:
             xml = ET.parse(osc.core.http_GET(url)).getroot()
         except HTTPError as e:
-            self.logger.error('ERROR in URL %s [%s]' % (url, e))
+            self.logger.error(f'ERROR in URL {url} [{e}]')
             return ret

         if xml.find('error') is not None:
@@ -552,11 +552,11 @@ class CheckSource(ReviewBot.ReviewBot):

     def check_action_add_role(self, request, action):
         # Decline add_role request (assumed the bot acting on requests to Factory or similar).
-        message = 'Roles to packages are granted in the devel project, not in %s.' % action.tgt_project
+        message = f'Roles to packages are granted in the devel project, not in {action.tgt_project}.'

         if action.tgt_package is not None:
             project, package = devel_project_fallback(self.apiurl, action.tgt_project, action.tgt_package)
-            message += ' Send this request to {}/{}.'.format(project, package)
+            message += f' Send this request to {project}/{package}.'

         self.review_messages['declined'] = message
         return False
@@ -595,13 +595,13 @@ class CheckSource(ReviewBot.ReviewBot):
         if linked.get('project', action.tgt_project) != action.tgt_project:
             return True
         linked_package = linked.get('package')
-        self.review_messages['declined'] = "Delete the package %s instead" % (linked_package)
+        self.review_messages['declined'] = f"Delete the package {linked_package} instead"
         return False

     def check_action_delete_project(self, request, action):
         # Presumably if the request is valid the bot should be disabled or
         # overridden, but seems like no valid case for allowing this (see #1696).
-        self.review_messages['declined'] = 'Deleting the {} project is not allowed.'.format(action.tgt_project)
+        self.review_messages['declined'] = f'Deleting the {action.tgt_project} project is not allowed.'
         return False

     def check_action_delete_repository(self, request, action):
@@ -36,27 +36,27 @@ class FactorySourceChecker(ReviewBot.ReviewBot):
         if src_srcinfo is None:
             # source package does not exist?
             # handle here to avoid crashing on the next line
-            self.logger.info("Could not get source info for %s/%s@%s" % (src_project, src_package, src_rev))
+            self.logger.info(f"Could not get source info for {src_project}/{src_package}@{src_rev}")
             return False
         projects = self._package_get_upstream_projects(target_package)
         if projects is None:
-            self.logger.error("no upstream project found for {}, can't check".format(target_package))
+            self.logger.error(f"no upstream project found for {target_package}, can't check")
             return False

-        self.review_messages['declined'] = 'the package needs to be accepted in {} first'.format(' or '.join(projects))
+        self.review_messages['declined'] = f"the package needs to be accepted in {' or '.join(projects)} first"
         for project in projects:
-            self.logger.info("Checking in project %s" % project)
+            self.logger.info(f"Checking in project {project}")
             good = self._check_matching_srcmd5(project, target_package, src_srcinfo.verifymd5, self.history_limit)
             if good:
-                self.logger.info("{} is in {}".format(target_package, project))
+                self.logger.info(f"{target_package} is in {project}")
                 return good

             good = self._check_requests(project, target_package, src_srcinfo.verifymd5)
             if good:
-                self.logger.info("{} already reviewed for {}".format(target_package, project))
+                self.logger.info(f"{target_package} already reviewed for {project}")

         if not good:
-            self.logger.info('{} failed source submission check'.format(target_package))
+            self.logger.info(f'{target_package} failed source submission check')

         return good
@@ -167,7 +167,7 @@ by OBS on which this bot relies.

     def isNewPackage(self, tgt_project, tgt_package):
         try:
-            self.logger.debug("package_meta %s %s/%s" % (self.apiurl, tgt_project, tgt_package))
+            self.logger.debug(f"package_meta {self.apiurl} {tgt_project}/{tgt_package}")
             osc.core.show_package_meta(self.apiurl, tgt_project, tgt_package)
         except (HTTPError, URLError):
             return True
@@ -200,7 +200,7 @@ by OBS on which this bot relies.
             self.logger.debug("reject: diff contains no tags")
             return False
         if deleted > 0:
-            self.review_messages['declined'] = '{} issue reference(s) deleted'.format(deleted)
+            self.review_messages['declined'] = f'{deleted} issue reference(s) deleted'
             return False
         return True
@@ -30,7 +30,7 @@ for arg in args:
     elif re.search(r'packages', arg):
         repo.add_susetags(argf, 0, None)
     else:
-        print("%s: unknown repo type" % (arg))
+        print(f"{arg}: unknown repo type")
         sys.exit(1)

 # we only want self-provides
@@ -59,4 +59,4 @@ for p in firstrepo.solvables:
     src = p.lookup_str(solv.SOLVABLE_SOURCENAME)
     if src is None:
         src = "?"
-    print("%s: %s is older than %s from %s" % (src, p, pp, pp.repo))
+    print(f"{src}: {p} is older than {pp} from {pp.repo}")
@@ -60,7 +60,7 @@ class CompareList(object):
             apiurl = 'https://api.suse.de'
         else:
             apiurl = self.apiurl
-        query = "match=state/@name='accepted'+and+(action/target/@project='{}'+and+action/@type='delete')".format(project)
+        query = f"match=state/@name='accepted'+and+(action/target/@project='{project}'+and+action/@type='delete')"
         url = makeurl(apiurl, ['search', 'request'], query)
         f = http_GET(url)
         root = ET.parse(f).getroot()
@@ -77,12 +77,12 @@ class CompareList(object):
             return False

         for linked in links:
-            if linked.get('project') == project and linked.get('package').startswith("%s." % package):
+            if linked.get('project') == project and linked.get('package').startswith(f"{package}."):
                 return False
         return True

     def check_diff(self, package, old_prj, new_prj):
-        logging.debug('checking %s ...' % package)
+        logging.debug(f'checking {package} ...')
         query = {'cmd': 'diff',
                  'view': 'xml',
                  'oproject': old_prj,
@@ -90,9 +90,9 @@ class CompareList(object):
         u = makeurl(self.apiurl, ['source', new_prj, package], query=query)
         root = ET.parse(http_POST(u)).getroot()
         old_srcmd5 = root.findall('old')[0].get('srcmd5')
-        logging.debug('%s old srcmd5 %s in %s' % (package, old_srcmd5, old_prj))
+        logging.debug(f'{package} old srcmd5 {old_srcmd5} in {old_prj}')
         new_srcmd5 = root.findall('new')[0].get('srcmd5')
-        logging.debug('%s new srcmd5 %s in %s' % (package, new_srcmd5, new_prj))
+        logging.debug(f'{package} new srcmd5 {new_srcmd5} in {new_prj}')
         # Compare srcmd5
         if old_srcmd5 != new_srcmd5:
             # check if it has diff element
@@ -104,13 +104,13 @@ class CompareList(object):
     def submit_new_package(self, source, target, package, msg=None):
         req = osc.core.get_request_list(self.apiurl, target, package, req_state=('new', 'review', 'declined'))
         if req:
-            print("There is a request to %s / %s already, skip!" % (target, package))
+            print(f"There is a request to {target} / {package} already, skip!")
         else:
             if not msg:
                 msg = 'New package submitted by compare_pkglist'
             res = osc.core.create_submit_request(self.apiurl, source, package, target, package, message=msg)
             if res and res is not None:
-                print('Created request %s for %s' % (res, package))
+                print(f'Created request {res} for {package}')
                 return True
             else:
                 print('Error occurred when creating the submit request')
@@ -124,20 +124,20 @@ class CompareList(object):
             return
         if self.submitfrom and self.submitto:
             if not self.item_exists(self.submitfrom):
-                print("Project %s is not exist" % self.submitfrom)
+                print(f"Project {self.submitfrom} is not exist")
                 return
             if not self.item_exists(self.submitto):
-                print("Project %s is not exist" % self.submitto)
+                print(f"Project {self.submitto} is not exist")
                 return

         # get souce packages from target
-        print('Gathering the package list from %s' % self.old_prj)
+        print(f'Gathering the package list from {self.old_prj}')
         source = self.get_source_packages(self.old_prj)
-        print('Gathering the package list from %s' % self.new_prj)
+        print(f'Gathering the package list from {self.new_prj}')
         target = self.get_source_packages(self.new_prj)
         removed_packages = self.removed_pkglist(self.old_prj)
         if self.existin:
-            print('Gathering the package list from %s' % self.existin)
+            print(f'Gathering the package list from {self.existin}')
             existin_packages = self.get_source_packages(self.existin)

         if not self.removedonly:
@@ -162,10 +162,10 @@ class CompareList(object):
                     continue

                 if pkg in removed_pkgs_in_target:
-                    print("New package but has removed from {:<8} - {}".format(self.new_prj, pkg))
+                    print(f"New package but has removed from {self.new_prj:<8} - {pkg}")
                     continue

-                print("New package than {:<8} - {}".format(self.new_prj, pkg))
+                print(f"New package than {self.new_prj:<8} - {pkg}")

                 if self.submit:
                     if self.submit_limit and submit_counter > int(self.submit_limit):
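The `{:<8}` conversions here show that format specifications carry over unchanged into f-strings; only the positional argument moves inside the braces, and the spec stays after the colon. A quick illustration with hypothetical values:

```python
new_prj, pkg = 'openSUSE:Factory', 'vim'  # hypothetical values

# str.format() with a left-align/minimum-width spec ...
old = "New package than {:<8} - {}".format(new_prj, pkg)

# ... keeps the identical spec in the f-string form.
new = f"New package than {new_prj:<8} - {pkg}"

assert old == new
```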
@@ -173,11 +173,11 @@ class CompareList(object):

                     if self.submitfrom and self.submitto:
                         if not self.item_exists(self.submitfrom, pkg):
-                            print("%s not found in %s" % (pkg, self.submitfrom))
+                            print(f"{pkg} not found in {self.submitfrom}")
                             continue
-                        msg = "Automated submission of a package from %s to %s" % (self.submitfrom, self.submitto)
+                        msg = f"Automated submission of a package from {self.submitfrom} to {self.submitto}"
                         if self.existin:
-                            msg += " that was included in %s" % (self.existin)
+                            msg += f" that was included in {self.existin}"
                         if self.submit_new_package(self.submitfrom, self.submitto, pkg, msg):
                             submit_counter += 1
                     else:
@@ -188,13 +188,13 @@ class CompareList(object):
             elif not self.newonly:
                 diff = self.check_diff(pkg, self.old_prj, self.new_prj)
                 if diff:
-                    print("Different source in {:<8} - {}".format(self.new_prj, pkg))
+                    print(f"Different source in {self.new_prj:<8} - {pkg}")
                     if self.verbose:
-                        print("=== Diff ===\n{}".format(diff))
+                        print(f"=== Diff ===\n{diff}")

         for pkg in removed_packages:
             if pkg in target:
-                print("Deleted package in {:<8} - {}".format(self.old_prj, pkg))
+                print(f"Deleted package in {self.old_prj:<8} - {pkg}")


 def main(args):
@@ -214,10 +214,10 @@ if __name__ == '__main__':
     parser.add_argument('-d', '--debug', action='store_true',
                         help='print info useful for debuging')
     parser.add_argument('-o', '--old', dest='old_prj', metavar='PROJECT',
-                        help='the old project where to compare (default: %s)' % SLE,
+                        help=f'the old project where to compare (default: {SLE})',
                         default=SLE)
     parser.add_argument('-n', '--new', dest='new_prj', metavar='PROJECT',
-                        help='the new project where to compare (default: %s)' % OPENSUSE,
+                        help=f'the new project where to compare (default: {OPENSUSE})',
                         default=OPENSUSE)
     parser.add_argument('-v', '--verbose', action='store_true',
                         help='show the diff')
@@ -68,11 +68,11 @@ class ContainerCleaner(ToolBase.ToolBase):
                 if len(bins) > 0:
                     match = regex_srccontainer.match(buildcontainer)
                     if not match:
-                        raise Exception("Could not map %s to source container" % buildcontainer)
+                        raise Exception(f"Could not map {buildcontainer} to source container")

                     srccontainer = match.group(1)
                     if srccontainer not in srccontainers:
-                        raise Exception("Mapped %s to wrong source container (%s)" % (buildcontainer, srccontainer))
+                        raise Exception(f"Mapped {buildcontainer} to wrong source container ({srccontainer})")

                     if srccontainer not in srccontainerarchs:
                         srccontainerarchs[srccontainer] = []

32	deptool.py
@@ -68,7 +68,7 @@ class DepTool(cmdln.Cmdln):
             name = os.path.basename(os.path.splitext(r)[0])
             repo = self.pool.add_repo(name)
             repo.add_solv(r)
-            logger.debug("add repo %s" % name)
+            logger.debug(f"add repo {name}")
         else:
             try:
                 if r.endswith('.repo'):
@@ -82,7 +82,7 @@ class DepTool(cmdln.Cmdln):
                 repo.add_solv(solvfile % name)
                 if parser.has_option(name, 'priority'):
                     repo.priority = parser.getint(name, 'priority')
-                logger.debug("add repo %s" % name)
+                logger.debug(f"add repo {name}")
             except Exception as e:
                 logger.error(e)
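`repo.add_solv(solvfile % name)` is intentionally left as %-formatting in the hunk above: `solvfile` holds a template that is filled in later, whereas an f-string interpolates at the point the literal is written, so a deferred template cannot become one. A sketch (the template value is hypothetical):

```python
# A %-template stored in a variable, e.g. read from configuration,
# is applied only when the name is known.
solvfile = '/var/cache/%s.solv'  # hypothetical template value

def solv_path(name):
    # This deferred substitution is why the call stays %-style.
    return solvfile % name

print(solv_path('repo-oss'))  # -> /var/cache/repo-oss.solv
```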
@@ -130,14 +130,14 @@ class DepTool(cmdln.Cmdln):
             sel = self.pool.select(str(lock), solv.Selection.SELECTION_NAME)
             if sel.isempty():
                 # if we can't find it, it probably is not as important
-                logger.debug('locked package {} not found'.format(lock))
+                logger.debug(f'locked package {lock} not found')
             else:
                 jobs += sel.jobs(solv.Job.SOLVER_LOCK)

         for n in packages:
             sel = self.pool.select(str(n), solv.Selection.SELECTION_NAME)
             if sel.isempty():
-                logger.error('package {} not found'.format(n))
+                logger.error(f'package {n} not found')
             jobs += sel.jobs(solv.Job.SOLVER_INSTALL)

         solver = self.pool.Solver()
@@ -166,13 +166,13 @@ class DepTool(cmdln.Cmdln):
             if reason == solv.Solver.SOLVER_REASON_WEAKDEP:
                 for v in solver.describe_weakdep_decision(s):
                     reason2, s2, dep = v
-                    print("-> %s %s %s" % (s2.name, REASONS[reason2], dep))
+                    print(f"-> {s2.name} {REASONS[reason2]} {dep}")
             else:
-                print("-> %s %s %s" % (s.name, REASONS[reason], ruleinfo))
+                print(f"-> {s.name} {REASONS[reason]} {ruleinfo}")

         if opts.size:
             size = trans.calc_installsizechange()
-            print("SIZE %s" % (size))
+            print(f"SIZE {size}")

         return True
@@ -212,13 +212,13 @@ class DepTool(cmdln.Cmdln):
             if sel.isempty():
                 logger.error("%s not found", n)
             for s in sel.solvables():
-                print('- {}-{}@{}:'.format(s.name, s.evr, s.arch))
+                print(f'- {s.name}-{s.evr}@{s.arch}:')
                 for kind in ('RECOMMENDS', 'REQUIRES', 'SUPPLEMENTS', 'ENHANCES', 'PROVIDES', 'SUGGESTS'):
                     deps = s.lookup_deparray(getattr(solv, 'SOLVABLE_' + kind), 0)
                     if deps:
-                        print(' {}:'.format(kind))
+                        print(f' {kind}:')
                         for dep in deps:
-                            print(' - {}'.format(dep))
+                            print(f' - {dep}')

     @cmdln.option("-r", "--repo", dest="repo", action="append",
                   help="repo to use")
@@ -234,7 +234,7 @@ class DepTool(cmdln.Cmdln):
         for r in relation:
             i = self.pool.str2id(r)
             for s in self.pool.whatprovides(i):
-                print('- {}-{}@{}:'.format(s.name, s.evr, s.arch))
+                print(f'- {s.name}-{s.evr}@{s.arch}:')

     @cmdln.option("-r", "--repo", dest="repo", action="append",
                   help="repo to use")
@@ -256,7 +256,7 @@ class DepTool(cmdln.Cmdln):
             if name.startswith('pattern-order()'):
                 # XXX: no function in bindings to do that properly
                 order = name[name.find('= ') + 2:]
-                print("{} {}".format(order, s.name))
+                print(f"{order} {s.name}")

     @cmdln.option("--providers", action="store_true",
                   help="also show other providers")
@@ -288,7 +288,7 @@ class DepTool(cmdln.Cmdln):
                     logger.info('nothing %s %s', kind.lower(), r)
                     continue
                 for s in sel.solvables():
-                    print(' {}: {}-{}@{}'.format(r, s.name, s.evr, s.arch))
+                    print(f' {r}: {s.name}-{s.evr}@{s.arch}')
         else:
             for n in args:
                 sel = self.pool.select(n, solv.Selection.SELECTION_NAME)
@@ -312,7 +312,7 @@ class DepTool(cmdln.Cmdln):
                 if not kindprinted:
                     print(kind)
                     kindprinted = True
-                print(' {}: {}-{}@{}'.format(p, r.name, r.evr, r.arch))
+                print(f' {p}: {r.name}-{r.evr}@{r.arch}')

     @cmdln.option("-r", "--repo", dest="repo", action="append",
                   help="repo to use")
@@ -340,7 +340,7 @@ class DepTool(cmdln.Cmdln):
                 if not kindprinted:
                     print(kind)
                     kindprinted = True
-                print(' {}-{}@{}'.format(r.name, r.evr, r.arch))
+                print(f' {r.name}-{r.evr}@{r.arch}')

     @cmdln.option("-r", "--repo", dest="repo", action="append",
                   help="repo to use")
@@ -362,7 +362,7 @@ class DepTool(cmdln.Cmdln):
             # pretty stupid, just lookup strings
             value = s.lookup_str(sid)
             if value:
-                print('{}: {}'.format(attr[len('SOLVABLE_'):], value))
+                print(f"{attr[len('SOLVABLE_'):]}: {value}")


 if __name__ == "__main__":
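The converted line above also has to switch the outer quotes to double quotes: before Python 3.12, the expression inside an f-string's braces may not reuse the literal's own quote character. Illustrated with hypothetical values:

```python
attr = 'SOLVABLE_DESCRIPTION'  # hypothetical attribute name
value = 'example text'         # hypothetical value

# The slice expression uses single quotes, so the enclosing f-string
# must use double quotes on Python < 3.12.
print(f"{attr[len('SOLVABLE_'):]}: {value}")  # -> DESCRIPTION: example text
```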
@@ -56,7 +56,7 @@ def devel_projects_get(apiurl, project):
     """
     devel_projects = {}

-    root = search(apiurl, **{'package': "@project='{}'".format(project)})['package']
+    root = search(apiurl, **{'package': f"@project='{project}'"})['package']
     for devel in root.findall('package/devel[@project]'):
         devel_projects[devel.attrib['project']] = True
@@ -103,7 +103,7 @@ def maintainer(args):
         groups = meta.xpath('group[@role="maintainer"]/@groupid')
         intersection = set(groups).intersection(desired)
         if len(intersection) != len(desired):
-            print('{} missing {}'.format(devel_project, ', '.join(desired - intersection)))
+            print(f"{devel_project} missing {', '.join(desired - intersection)}")


 def notify(args):
@@ -125,7 +125,7 @@ def notify(args):
         maintainer_map.setdefault(userid, set())
         maintainer_map[userid].add(devel_package_identifier)

-    subject = 'Packages you maintain are present in {}'.format(args.project)
+    subject = f'Packages you maintain are present in {args.project}'
     for userid, package_identifiers in maintainer_map.items():
         email = entity_email(apiurl, userid)
         message = """This is a friendly reminder about your packages in {}.
@@ -146,14 +146,14 @@ in charge of the following packages:
 - {}""".format(
             args.project, '\n- '.join(sorted(package_identifiers)))

-        log = 'notified {} of {} packages'.format(userid, len(package_identifiers))
+        log = f'notified {userid} of {len(package_identifiers)} packages'
         try:
             mail_send(apiurl, args.project, email, subject, message, dry=args.dry)
             print(log)
         except smtplib.SMTPRecipientsRefused:
-            print('[FAILED ADDRESS] {} ({})'.format(log, email))
+            print(f'[FAILED ADDRESS] {log} ({email})')
         except smtplib.SMTPException as e:
-            print('[FAILED SMTP] {} ({})'.format(log, e))
+            print(f'[FAILED SMTP] {log} ({e})')


 def requests(args):
@@ -176,7 +176,7 @@ def requests(args):
             request.reqid,
             '/'.join((action.tgt_project, action.tgt_package)),
             '/'.join((action.src_project, action.src_package)),
-            '({} days old)'.format(age),
+            f'({age} days old)',
         )))

         if args.remind:
@@ -210,7 +210,7 @@ def reviews(args):
             request.reqid,
             '/'.join((review.by_project, review.by_package)) if review.by_package else review.by_project,
             '/'.join((action.tgt_project, action.tgt_package)),
-            '({} days old)'.format(age),
+            f'({age} days old)',
         )))

         if args.remind:
@@ -248,7 +248,7 @@ def remind_comment(apiurl, repeat_age, request_id, project, package=None):
     if comment:
         delta = datetime.utcnow() - comment['when']
         if delta.days < repeat_age:
-            print(' skipping due to previous reminder from {} days ago'.format(delta.days))
+            print(f' skipping due to previous reminder from {delta.days} days ago')
             return

         # Repeat notification so remove old comment.
@@ -264,7 +264,7 @@ def remind_comment(apiurl, repeat_age, request_id, project, package=None):
     userids = sorted(maintainers_get(apiurl, project, package))
     if len(userids):
         users = ['@' + userid for userid in userids]
-        message = '{}: {}'.format(', '.join(users), REMINDER)
+        message = f"{', '.join(users)}: {REMINDER}"
     else:
         message = REMINDER
     print(' ' + message)

2	dist/ci/smtp/eml-server.py (vendored)
@@ -17,7 +17,7 @@ class EmlServer(SMTPServer):
         f = open(filename, 'w')
         f.write(data)
         f.close
-        print('%s saved.' % filename)
+        print(f'{filename} saved.')
         self.no += 1
@@ -115,7 +115,7 @@ class DockerImagePublisherRegistry(DockerImagePublisher):

     def getDockerArch(self, arch):
         if arch not in self.MAP_ARCH_RPM_DOCKER:
-            raise DockerPublishException("Unknown arch %s" % arch)
+            raise DockerPublishException(f"Unknown arch {arch}")

         return self.MAP_ARCH_RPM_DOCKER[arch]
@@ -288,7 +288,7 @@ class DockerImageFetcherURL(DockerImageFetcher):
             tar_file.write(requests.get(self.url).content)
             with tempfile.TemporaryDirectory() as tar_dir:
                 # Extract the .tar.xz into the dir
-                subprocess.call("tar -xaf '%s' -C '%s'" % (tar_file.name, tar_dir), shell=True)
+                subprocess.call(f"tar -xaf '{tar_file.name}' -C '{tar_dir}'", shell=True)
                 return callback(tar_dir)
@@ -354,7 +354,7 @@ class DockerImageFetcherOBS(DockerImageFetcher):
             tar_file.write(requests.get(self.newest_release_url + "/" + filename).content)
             with tempfile.TemporaryDirectory() as tar_dir:
                 # Extract the .tar into the dir
-                subprocess.call("tar -xaf '%s' -C '%s'" % (tar_file.name, tar_dir), shell=True)
+                subprocess.call(f"tar -xaf '{tar_file.name}' -C '{tar_dir}'", shell=True)
                 return callback(tar_dir)
@@ -412,25 +412,25 @@ def run():
     success = True

     for distro in args.distros:
-        print("Handling %s" % distro)
+        print(f"Handling {distro}")

         archs_to_update = {}
         fetchers = config[distro]['fetchers']
         publisher = config[distro]['publisher']

         for arch in fetchers:
-            print("\tArchitecture %s" % arch)
+            print(f"\tArchitecture {arch}")
             try:
                 current = fetchers[arch].currentVersion()
-                print("\t\tAvailable version: %s" % current)
+                print(f"\t\tAvailable version: {current}")

                 released = publisher.releasedDockerImageVersion(arch)
-                print("\t\tReleased version: %s" % released)
+                print(f"\t\tReleased version: {released}")

                 if current != released:
                     archs_to_update[arch] = current
             except Exception as e:
-                print("\t\tException during version fetching: %s" % e)
+                print(f"\t\tException during version fetching: {e}")

         if not archs_to_update:
             print("\tNothing to do.")
@@ -444,7 +444,7 @@ def run():
         need_to_upload = False

         for arch, version in archs_to_update.items():
-            print("\tUpdating %s image to version %s" % (arch, version))
+            print(f"\tUpdating {arch} image to version {version}")
             try:
                 fetchers[arch].getDockerImage(lambda image_path: publisher.addImage(version=version,
                                                                                     arch=arch,
@@ -452,11 +452,11 @@ def run():
                 need_to_upload = True

             except DockerFetchException as dfe:
-                print("\t\tCould not fetch the image: %s" % dfe)
+                print(f"\t\tCould not fetch the image: {dfe}")
                 success = False
                 continue
             except DockerPublishException as dpe:
-                print("\t\tCould not publish the image: %s" % dpe)
+                print(f"\t\tCould not publish the image: {dpe}")
                 success = False
                 continue
@@ -40,7 +40,7 @@ class DockerRegistryClient():
         self.username = username
         self.password = password
         self.repository = repository
-        self.scopes = ["repository:%s:pull,push,delete" % repository]
+        self.scopes = [f"repository:{repository}:pull,push,delete"]
         self.token = None

 class DockerRegistryError(Exception):
@@ -63,7 +63,7 @@ class DockerRegistryClient():
             bearer_dict[assignment[0]] = assignment[1].strip('"')

         scope_param = "&scope=".join([""] + [urllib.parse.quote(scope) for scope in self.scopes])
-        response = requests.get("%s?service=%s%s" % (bearer_dict['realm'], bearer_dict['service'], scope_param),
+        response = requests.get(f"{bearer_dict['realm']}?service={bearer_dict['service']}{scope_param}",
                                 auth=(self.username, self.password))
         self.token = response.json()['token']
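Dictionary subscripts move into the braces here, which again forces alternating quote styles inside the f-string. A self-contained sketch with hypothetical token-endpoint values:

```python
bearer_dict = {  # hypothetical values parsed from a WWW-Authenticate header
    'realm': 'https://registry.example.org/token',
    'service': 'registry.example.org',
}
scope_param = '&scope=repository%3Afoo%3Apull'  # hypothetical scope parameter

# Old: positional %s substitution pulls the lookups out of the string.
old = "%s?service=%s%s" % (bearer_dict['realm'], bearer_dict['service'], scope_param)

# New: the lookups sit inside the braces; single quotes within the
# double-quoted f-string keep the literal valid on Python < 3.12.
new = f"{bearer_dict['realm']}?service={bearer_dict['service']}{scope_param}"

assert old == new
```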
@@ -123,7 +123,7 @@ class DockerRegistryClient():
         alg.update(content)
         reference = "sha256:" + alg.hexdigest()

-        resp = self.doHttpCall("PUT", "/v2/%s/manifests/%s" % (self.repository, reference),
+        resp = self.doHttpCall("PUT", f"/v2/{self.repository}/manifests/{reference}",
                                headers={'Content-Type': content_json['mediaType']},
                                data=content)
@@ -153,7 +153,7 @@ class DockerRegistryClient():
     def getManifest(self, reference):
         """Get a (json-parsed) manifest with the given reference (digest or tag).
         If the manifest does not exist, return None. For other errors, False."""
-        resp = self.doHttpCall("GET", "/v2/%s/manifests/%s" % (self.repository, reference),
+        resp = self.doHttpCall("GET", f"/v2/{self.repository}/manifests/{reference}",
                                headers={'Accept': "application/vnd.docker.distribution.manifest.list.v2+json,application/vnd.docker.distribution.manifest.v2+json"})  # noqa: E501

         if resp.status_code == 404:
@@ -167,7 +167,7 @@ class DockerRegistryClient():
     def getManifestDigest(self, reference):
         """Return the digest of the manifest with the given reference.
         If the manifest doesn't exist or the request fails, it returns False."""
-        resp = self.doHttpCall("HEAD", "/v2/%s/manifests/%s" % (self.repository, reference),
+        resp = self.doHttpCall("HEAD", f"/v2/{self.repository}/manifests/{reference}",
                                headers={'Accept': "application/vnd.docker.distribution.manifest.list.v2+json,application/vnd.docker.distribution.manifest.v2+json"})  # noqa: E501

         if resp.status_code != 200:
@@ -177,7 +177,7 @@ class DockerRegistryClient():

     def deleteManifest(self, digest):
         """Delete the manifest with the given reference."""
-        resp = self.doHttpCall("DELETE", "/v2/%s/manifests/%s" % (self.repository, digest))
+        resp = self.doHttpCall("DELETE", f"/v2/{self.repository}/manifests/{digest}")

         return resp.status_code == 202
@@ -193,7 +193,7 @@ class DockerRegistryClient():
             raise Exception("Invalid digest")

         # Check whether the blob already exists - don't upload it needlessly.
-        stat_request = self.doHttpCall("HEAD", "/v2/%s/blobs/%s" % (self.repository, digest))
+        stat_request = self.doHttpCall("HEAD", f"/v2/{self.repository}/blobs/{digest}")
         if stat_request.status_code == 200 or stat_request.status_code == 307:
             return True
@@ -204,7 +204,7 @@ class DockerRegistryClient():
             content = blob.read()

         # First request an upload "slot", we get an URL we can PUT to back
-        upload_request = self.doHttpCall("POST", "/v2/%s/blobs/uploads/" % self.repository)
+        upload_request = self.doHttpCall("POST", f"/v2/{self.repository}/blobs/uploads/")
         if upload_request.status_code == 202:
             location = upload_request.headers['Location']
             upload = self.doHttpCall("PUT", location + "&digest=" + digest,
@@ -81,7 +81,7 @@ if not options.version:
     conn.request('HEAD', u.path)
     res = conn.getresponse()
     if res.status != 302:
-        raise Exception("http fail: %s %s" % (res.status, res.reason))
+        raise Exception(f"http fail: {res.status} {res.reason}")

     loc = res.getheader('location')
     if loc is None:
@@ -89,7 +89,7 @@ if not options.version:

     m = re.search(r'(?:Snapshot|Build)([\d.]+)-Media', loc)
     if m is None:
-        raise Exception("failed to parse %s" % loc)
+        raise Exception(f"failed to parse {loc}")

     version = m.group(1)
     logger.debug("found version %s", version)
@@ -117,7 +117,7 @@ conn = http.client.HTTPConnection(u.hostname, 80)
 conn.request('GET', u.path)
 res = conn.getresponse()
 if res.status != 200:
-    raise Exception("http %s fail: %s %s" % (u, res.status, res.reason))
+    raise Exception(f"http {u} fail: {res.status} {res.reason}")

 txt = res.read().decode('latin1')
 if '====' not in txt:
@ -136,7 +136,7 @@ if options.dry:
|
|||||||
print("sending ...")
|
print("sending ...")
|
||||||
print(msg.as_string())
|
print(msg.as_string())
|
||||||
else:
|
else:
|
||||||
logger.info("announcing version {}".format(version))
|
logger.info(f"announcing version {version}")
|
||||||
s = smtplib.SMTP(config['relay'])
|
s = smtplib.SMTP(config['relay'])
|
||||||
s.send_message(msg)
|
s.send_message(msg)
|
||||||
s.quit()
|
s.quit()
|
||||||

@@ -32,7 +32,7 @@ def list(dirpath):
for i in sorted(os.listdir(_dir), reverse=True):
if not digits_re.match(i):
continue
- ret = ret + '<a href="diff/%s">%s</a>' % (i, i)
+ ret = ret + f'<a href="diff/{i}">{i}</a>'
if i == current:
ret = ret + " <--"
ret = ret + '<br/>'

@@ -87,9 +87,9 @@ class ChangeLogger(cmdln.Cmdln):
'kernel-vanilla',
'kernel-xen',
):
- srpm = '%s-%s-%s.src.rpm' % ('kernel-source', m.group('version'), m.group('release'))
+ srpm = f"kernel-source-{m.group('version')}-{m.group('release')}.src.rpm"
pkgdata[binrpm]['sourcerpm'] = srpm
- print("%s -> %s" % (utf8str(h['sourcerpm']), srpm))
+ print(f"{utf8str(h['sourcerpm'])} -> {srpm}")

if srpm in changelogs:
changelogs[srpm]['packages'].append(binrpm)
@@ -104,7 +104,7 @@ class ChangeLogger(cmdln.Cmdln):
def _walk_through_iso_image(iso, path="/"):
file_stats = iso.readdir(path)
if file_stats is None:
- raise Exception("Unable to find directory %s inside the iso image" % path)
+ raise Exception(f"Unable to find directory {path} inside the iso image")

for stat in file_stats:
filename = stat[0]
@@ -128,7 +128,7 @@ class ChangeLogger(cmdln.Cmdln):
fd = os.open(arg, os.O_RDONLY)

if not iso.is_open() or fd is None:
- raise Exception("Could not open %s as an ISO-9660 image." % arg)
+ raise Exception(f"Could not open {arg} as an ISO-9660 image.")

for filename, LSN in _walk_through_iso_image(iso):
os.lseek(fd, LSN * pycdio.ISO_BLOCKSIZE, io.SEEK_SET)
@@ -145,7 +145,7 @@ class ChangeLogger(cmdln.Cmdln):
h = self.readRpmHeader(pkg)
_getdata(h)
else:
- raise Exception("don't know what to do with %s" % arg)
+ raise Exception(f"don't know what to do with {arg}")

return pkgdata, changelogs

@@ -161,7 +161,7 @@ class ChangeLogger(cmdln.Cmdln):
if not opts.dir:
raise Exception("need --dir option")
if not os.path.isdir(opts.dir):
- raise Exception("%s must be a directory" % opts.dir)
+ raise Exception(f"{opts.dir} must be a directory")
if not opts.snapshot:
raise Exception("missing snapshot option")

@@ -207,18 +207,18 @@ class ChangeLogger(cmdln.Cmdln):
if not opts.dir:
raise Exception("need --dir option")
if not os.path.isdir(opts.dir):
- raise Exception("%s must be a directory" % opts.dir)
+ raise Exception(f"{opts.dir} must be a directory")

f = open(os.path.join(opts.dir, version1), 'rb')
(v, (v1pkgs, v1changelogs)) = pickle.load(f,
encoding='utf-8', errors='backslashreplace')
if v != data_version:
- raise Exception("not matching version %s in %s" % (v, version1))
+ raise Exception(f"not matching version {v} in {version1}")
f = open(os.path.join(opts.dir, version2), 'rb')
(v, (v2pkgs, v2changelogs)) = pickle.load(f,
encoding='utf-8', errors='backslashreplace')
if v != data_version:
- raise Exception("not matching version %s in %s" % (v, version2))
+ raise Exception(f"not matching version {v} in {version2}")

p1 = set(v1pkgs.keys())
p2 = set(v2pkgs.keys())
@@ -237,7 +237,7 @@ class ChangeLogger(cmdln.Cmdln):
try:
t1 = v1changelogs[srpm1]['changelogtime'][0]
except IndexError:
- print("{} doesn't have a changelog".format(srpm1), file=sys.stderr)
+ print(f"{srpm1} doesn't have a changelog", file=sys.stderr)
continue
m = SRPM_RE.match(srpm)
if m:
@@ -245,21 +245,21 @@ class ChangeLogger(cmdln.Cmdln):
else:
name = srpm
if len(v2changelogs[srpm]['changelogtime']) == 0:
- print(' {} ERROR: no changelog'.format(name))
+ print(f' {name} ERROR: no changelog')
continue
if t1 == v2changelogs[srpm]['changelogtime'][0]:
continue # no new changelog entry, probably just rebuilt
pkgs = sorted(group[srpm])
- details += "\n==== %s ====\n" % name
+ details += f"\n==== {name} ====\n"
if v1pkgs[pkgs[0]]['version'] != v2pkgs[pkgs[0]]['version']:
print(" %s (%s -> %s)" % (name, v1pkgs[pkgs[0]]['version'],
v2pkgs[pkgs[0]]['version']))
details += "Version update (%s -> %s)\n" % (v1pkgs[pkgs[0]]['version'],
v2pkgs[pkgs[0]]['version'])
else:
- print(" %s" % name)
+ print(f" {name}")
if len(pkgs) > 1:
- details += "Subpackages: %s\n" % " ".join([p for p in pkgs if p != name])
+ details += f"Subpackages: {' '.join([p for p in pkgs if p != name])}\n"

changedetails = ""
for (i2, t2) in enumerate(v2changelogs[srpm]['changelogtime']):
@@ -273,7 +273,7 @@ class ChangeLogger(cmdln.Cmdln):
if len(changedetails_lines) > changelog_max_lines + 5:
changedetails = '\n'.join(changedetails_lines[0:changelog_max_lines])
left = len(changedetails_lines) - changelog_max_lines - 1
- changedetails += '\n ... changelog too long, skipping {} lines ...\n'.format(left)
+ changedetails += f'\n ... changelog too long, skipping {left} lines ...\n'
# add last line of changelog diff so that it's possible to
# find out the end of the changelog section
changedetails += changedetails_lines[-1]

@@ -19,7 +19,7 @@ from osclib.core import project_pseudometa_package
OPENSUSE = 'openSUSE:Leap:15.2'
OPENSUSE_PREVERSION = 'openSUSE:Leap:15.1'
OPENSUSE_RELEASED_VERSION = ['openSUSE:Leap:15.0', 'openSUSE:Leap:15.1']
- FCC = '{}:FactoryCandidates'.format(OPENSUSE)
+ FCC = f'{OPENSUSE}:FactoryCandidates'

makeurl = osc.core.makeurl
http_GET = osc.core.http_GET
@@ -51,7 +51,7 @@ class FccFreezer(object):
add to the frozenlinks, can be the ignored package.
"""
package = si.get('package')
- logging.debug("Processing %s" % (package))
+ logging.debug(f"Processing {package}")

# If the package is an internal one (e.g _product)
if package.startswith('_') or package.startswith('Test-DVD') or package.startswith('000'):
@@ -72,7 +72,7 @@ class FccFreezer(object):
proot = ET.parse(f).getroot()
lsrcmd5 = proot.get('lsrcmd5')
if lsrcmd5 is None:
- raise Exception("{}/{} is not a link but we expected one".format(self.factory, package))
+ raise Exception(f"{self.factory}/{package} is not a link but we expected one")
ET.SubElement(flink, 'package', {'name': package, 'srcmd5': lsrcmd5, 'vrev': si.get('vrev')})
return None

@@ -111,7 +111,7 @@ class FccFreezer(object):
if self.debug:
logging.debug("Dump ignored source")
for source in ignored_sources:
- logging.debug("Ignored source: %s" % source)
+ logging.debug(f"Ignored source: {source}")

url = makeurl(self.apiurl, ['source', FCC, '_project', '_frozenlinks'], {'meta': '1'})
link = ET.tostring(flink)
@@ -206,7 +206,7 @@ class FccSubmitter(object):

def is_new_package(self, tgt_project, tgt_package):
try:
- logging.debug("Gathering package_meta %s/%s" % (tgt_project, tgt_package))
+ logging.debug(f"Gathering package_meta {tgt_project}/{tgt_package}")
osc.core.show_package_meta(self.apiurl, tgt_project, tgt_package)
except (HTTPError, URLError):
return True
@@ -217,7 +217,7 @@ class FccSubmitter(object):
src_project = self.factory # submit from Factory only
dst_project = self.to_prj

- msg = 'Automatic request from %s by F-C-C Submitter. Please review this change and decline it if Leap do not need it.' % src_project
+ msg = f'Automatic request from {src_project} by F-C-C Submitter. Please review this change and decline it if Leap do not need it.'
res = osc.core.create_submit_request(self.apiurl,
src_project,
package,
@@ -253,7 +253,7 @@ class FccSubmitter(object):
def is_sle_base_pkgs(self, package):
link = self.get_link(self.to_prj, package)
if link is None or link.get('project') not in self.sle_base_prjs:
- logging.debug("%s not from SLE base" % package)
+ logging.debug(f"{package} not from SLE base")
return False
return True

@@ -262,7 +262,7 @@
succeeded_packages = []
succeeded_packages = self.get_build_succeeded_packages(self.from_prj)
if not len(succeeded_packages) > 0:
- logging.info('No build succeeded package in %s' % self.from_prj)
+ logging.info(f'No build succeeded package in {self.from_prj}')
return

print('Build succeeded packages:')
@@ -271,7 +271,7 @@
print(pkg)

print('-------------------------------------')
- print("Found {} build succeded packages".format(len(succeeded_packages)))
+ print(f"Found {len(succeeded_packages)} build succeded packages")

def get_deleted_packages(self, project):
query = 'states=accepted&types=delete&project={}&view=collection'
@@ -288,7 +288,7 @@
return pkgs

def load_skip_pkgs_list(self, project, package):
- url = makeurl(self.apiurl, ['source', project, package, '{}?expand=1'.format('fcc_skip_pkgs')])
+ url = makeurl(self.apiurl, ['source', project, package, 'fcc_skip_pkgs?expand=1'])
try:
return http_GET(url).read()
except HTTPError:
@@ -299,7 +299,7 @@
succeeded_packages = []
succeeded_packages = self.get_build_succeeded_packages(self.from_prj)
if not len(succeeded_packages) > 0:
- logging.info('No build succeeded package in %s' % self.from_prj)
+ logging.info(f'No build succeeded package in {self.from_prj}')
return

# randomize the list
@@ -321,22 +321,22 @@
submit_ok = True

if package in deleted_packages:
- logging.info('%s has been dropped from %s, ignore it!' % (package, self.to_prj))
+ logging.info(f'{package} has been dropped from {self.to_prj}, ignore it!')
submit_ok = False

if self.is_sle_base_pkgs(package) is True:
- logging.info('%s origin from SLE base, skip for now!' % package)
+ logging.info(f'{package} origin from SLE base, skip for now!')
submit_ok = False

# make sure it is new package
new_pkg = self.is_new_package(self.to_prj, package)
if new_pkg is not True:
- logging.info('%s is not a new package, do not submit.' % package)
+ logging.info(f'{package} is not a new package, do not submit.')
submit_ok = False

multi_specs = self.check_multiple_specfiles(self.factory, package)
if multi_specs is None:
- logging.info('%s does not exist in %s' % (package, 'openSUSE:Factory'))
+ logging.info(f'{package} does not exist in openSUSE:Factory')
submit_ok = False

if multi_specs:
@@ -348,7 +348,7 @@

for spec in multi_specs['specs']:
if spec not in succeeded_packages:
- logging.info('%s is sub-pacakge of %s but build failed, skip it!' % (spec, package))
+ logging.info(f'{spec} is sub-pacakge of {package} but build failed, skip it!')
submit_ok = False

if not submit_ok:
@@ -375,7 +375,7 @@
match = True

if match is not True:
- logging.info('%s/%s is in the skip list, do not submit.' % (devel_prj, package))
+ logging.info(f'{devel_prj}/{package} is in the skip list, do not submit.')
continue
else:
pass
@@ -388,18 +388,18 @@
match = True

if match is True:
- logging.info('%s is in the skip list, do not submit.' % package)
+ logging.info(f'{package} is in the skip list, do not submit.')
continue
else:
pass

res = self.create_submitrequest(package)
if res and res is not None:
- logging.info('Created request %s for %s' % (res, package))
+ logging.info(f'Created request {res} for {package}')
else:
logging.error('Error occurred when creating submit request')
else:
- logging.debug('%s is exist in %s, skip!' % (package, self.to_prj))
+ logging.debug(f'{package} is exist in {self.to_prj}, skip!')
time.sleep(5)

# dump multi specs packages
@@ -417,7 +417,7 @@ def main(args):
osc.conf.config['debug'] = args.debug

if args.freeze:
- print('freezing {}'.format(FCC))
+ print(f'freezing {FCC}')
freezer = FccFreezer()
freezer.freeze()
else:
@@ -436,10 +436,10 @@ if __name__ == '__main__':
parser.add_argument('-d', '--debug', action='store_true',
help='print info useful for debuging')
parser.add_argument('-f', '--from', dest='from_prj', metavar='PROJECT',
- help='project where to check (default: %s)' % FCC,
+ help=f'project where to check (default: {FCC})',
default=FCC)
parser.add_argument('-t', '--to', dest='to_prj', metavar='PROJECT',
- help='project where to submit the packages (default: %s)' % OPENSUSE,
+ help=f'project where to submit the packages (default: {OPENSUSE})',
default=OPENSUSE)
parser.add_argument('-r', '--freeze', dest='freeze', action='store_true', help='rebase FCC project')
parser.add_argument('-s', '--list', dest='list_packages', action='store_true', help='list build succeeded packages')

@@ -35,9 +35,9 @@ def notify_project(openqa, state):
logger.debug(f'{state} did not change')
return
try:
- openqa.openqa_request('PUT', 'obs_rsync/{}/runs?repository={}'.format(project, repository), retries=0)
+ openqa.openqa_request('PUT', f'obs_rsync/{project}/runs?repository={repository}', retries=0)
except RequestError as e:
- logger.info("Got exception on syncing repository: {}".format(e))
+ logger.info(f"Got exception on syncing repository: {e}")
return
copyfile(old_filename(state), new_filename(state))
subprocess.run(f'cd {args.to} && git add . && git commit -m "Update of {project}/{repository}" && git push', shell=True, check=True)
@@ -70,7 +70,7 @@ if __name__ == '__main__':
interesting_repos[f'{project}_-_{repository}'] = 1

openqa = OpenQA_Client(server=args.openqa)
- for state in glob.glob('{}/*.yaml'.format(args.repos)):
+ for state in glob.glob(f'{args.repos}/*.yaml'):
state = basename(state).replace('.yaml', '')
if state not in interesting_repos:
continue

@@ -44,7 +44,7 @@ class Project(object):
def map_iso(self, staging_project, iso):
parts = self.replace_string.split('/')
if parts[0] != 's':
- raise Exception("{}'s iso_replace_string does not start with s/".format(self.name))
+ raise Exception(f"{self.name}'s iso_replace_string does not start with s/")
old = parts[1]
new = parts[2]
new = new.replace('$LETTER', self.staging_letter(staging_project))
@@ -316,10 +316,10 @@ class Listener(PubSubConsumer):
return [job for job in jobs if self.is_production_job(job)]

def get_step_url(self, testurl, modulename):
- failurl = testurl + '/modules/{!s}/fails'.format(quote_plus(modulename))
+ failurl = testurl + f'/modules/{quote_plus(modulename)!s}/fails'
fails = requests.get(failurl).json()
failed_step = fails.get('first_failed_step', 1)
- return "{!s}#step/{!s}/{:d}".format(testurl, modulename, failed_step)
+ return f"{testurl!s}#step/{modulename!s}/{failed_step:d}"

def test_url(self, job):
url = self.openqa_url + ("/tests/%d" % job['id'])
@@ -340,7 +340,7 @@ class Listener(PubSubConsumer):

def on_message(self, unused_channel, method, properties, body):
self.acknowledge_message(method.delivery_tag)
- if method.routing_key == '{}.obs.repo.published'.format(amqp_prefix):
+ if method.routing_key == f'{amqp_prefix}.obs.repo.published':
self.on_published_repo(json.loads(body))
elif re.search(r'.openqa.', method.routing_key):
data = json.loads(body)
@@ -351,7 +351,7 @@ class Listener(PubSubConsumer):
elif data.get('HDD_1'):
self.on_openqa_job(data.get('HDD_1'))
else:
- self.logger.warning("unknown rabbitmq message {}".format(method.routing_key))
+ self.logger.warning(f"unknown rabbitmq message {method.routing_key}")


if __name__ == '__main__':

@@ -61,10 +61,10 @@ class Listener(PubSubConsumer):
for arch in archs:
repoid = self.check_arch(project, repository, arch)
if not repoid:
- self.logger.info('{}/{}/{} not yet done'.format(project, repository, arch))
+ self.logger.info(f'{project}/{repository}/{arch} not yet done')
return None
ids[arch] = repoid
- self.logger.info('All of {}/{} finished'.format(project, repository))
+ self.logger.info(f'All of {project}/{repository} finished')
return ids

def is_part_of_namespaces(self, project):
@@ -76,7 +76,7 @@ class Listener(PubSubConsumer):
# now we are (re-)connected to the bus and need to fetch the
# initial state
for namespace in self.namespaces:
- for state in glob.glob('{}*.yaml'.format(namespace)):
+ for state in glob.glob(f'{namespace}*.yaml'):
state = state.replace('.yaml', '')
# split
project, repository = state.split('_-_')
@@ -117,8 +117,8 @@ class Listener(PubSubConsumer):
pathname = project + '_-_' + repository + '.yaml'
with open(pathname, 'w') as f:
for arch in sorted(ids.keys()):
- f.write('{}: {}\n'.format(arch, ids[arch]))
- self.push_git('Repository update: {}/{}'.format(project, repository))
+ f.write(f'{arch}: {ids[arch]}\n')
+ self.push_git(f'Repository update: {project}/{repository}')

def on_message(self, unused_channel, method, properties, body):
self.logger.debug("on_message")
@@ -131,11 +131,11 @@ class Listener(PubSubConsumer):
if not self.is_part_of_namespaces(body['project']):
return
self.restart_timer()
- self.logger.info('Repo finished event: {}/{}/{}'.format(body['project'], body['repo'], body['arch']))
+ self.logger.info(f"Repo finished event: {body['project']}/{body['repo']}/{body['arch']}")
self.update_repo(body['project'], body['repo'])
else:
self.logger.warning(
- 'unknown rabbitmq message {}'.format(method.routing_key))
+ f'unknown rabbitmq message {method.routing_key}')


if __name__ == '__main__':

@@ -33,7 +33,7 @@ if __name__ == '__main__':
root = ET.parse(http_GET(url)).getroot()
if root.get('code') == 'finished':
continue
- logger.error('Repository {}/{}/{} is not yet finished'.format(args.project, args.repository, arch))
+ logger.error(f'Repository {args.project}/{args.repository}/{arch} is not yet finished')
logger.debug(ET.tostring(root).decode('utf-8'))
# scheduling means the scheduler had some reason to double check the repository state.
# this may or may not result in a restart of the build, but if it doesn't, we're in trouble.
@@ -54,9 +54,9 @@ if __name__ == '__main__':
if count.get('code') in ['succeeded', 'excluded', 'disabled']:
counts[count.get('code')] = int(count.get('count'))
continue
- logger.error('Repository {}/{} has {} packages'.format(args.project, args.repository, count.get('code')))
+ logger.error(f"Repository {args.project}/{args.repository} has {count.get('code')} packages")
sys.exit(1)

if counts['disabled'] > counts['succeeded']:
- logger.error('Repository {}/{} has more disabled packages than succeeded'.format(args.project, args.repository))
+ logger.error(f'Repository {args.project}/{args.repository} has more disabled packages than succeeded')
sys.exit(1)

@@ -64,10 +64,10 @@ def bug_owner(apiurl, package, entity='person'):
url = osc.core.makeurl(apiurl, ('search', 'owner'), query=query)
root = ET.parse(osc.core.http_GET(url)).getroot()

- bugowner = root.find('.//{}[@role="bugowner"]'.format(entity))
+ bugowner = root.find(f'.//{entity}[@role="bugowner"]')
if bugowner is not None:
return entity_email(apiurl, bugowner.get('name'), entity)
- maintainer = root.find('.//{}[@role="maintainer"]'.format(entity))
+ maintainer = root.find(f'.//{entity}[@role="maintainer"]')
if maintainer is not None:
return entity_email(apiurl, maintainer.get('name'), entity)
if entity == 'person':
@@ -108,7 +108,7 @@ def bugzilla_init(apiurl):
def prompt_continue(change_count):
allowed = ['y', 'b', 's', 'n', '']
if change_count > 0:
- print('File bug for {} issues and continue? [y/b/s/n/?] (y): '.format(change_count), end='')
+ print(f'File bug for {change_count} issues and continue? [y/b/s/n/?] (y): ', end='')
else:
print('No changes for which to create bug, continue? [y/b/s/n/?] (y): ', end='')

@@ -120,7 +120,7 @@ def prompt_continue(change_count):
response = 'y'
return response
else:
- print('Invalid response: {}'.format(response))
+ print(f'Invalid response: {response}')

return prompt_continue(change_count)

@@ -128,7 +128,7 @@ def prompt_continue(change_count):
def prompt_interactive(changes, project, package):
with tempfile.NamedTemporaryFile(mode='w', suffix='.yml') as temp:
temp.write(yaml.safe_dump(changes, default_flow_style=False, default_style="'") + '\n')
- temp.write('# {}/{}\n'.format(project, package))
+ temp.write(f'# {project}/{package}\n')
temp.write('# comment or remove lines to whitelist issues')
temp.flush()

@@ -161,7 +161,7 @@ def issue_normalize(trackers, tracker, name):
if tracker in trackers:
return trackers[tracker].replace('@@@', name)

- print('WARNING: ignoring unknown tracker {} for {}'.format(tracker, name))
+ print(f'WARNING: ignoring unknown tracker {tracker} for {name}')
return None


@@ -227,10 +227,10 @@ def print_stats(db):
reported += 1
else:
whitelisted += 1
- print('Packages: {}'.format(len(db)))
- print('Bugs: {}'.format(len(set(bug_ids))))
- print('Reported: {}'.format(reported))
- print('Whitelisted: {}'.format(whitelisted))
+ print(f'Packages: {len(db)}')
+ print(f'Bugs: {len(set(bug_ids))}')
+ print(f'Reported: {reported}')
+ print(f'Whitelisted: {whitelisted}')


def main(args):
@@ -252,14 +252,14 @@ def main(args):
git_repo_url = 'git@github.com:jberry-suse/openSUSE-release-tools-issue-db.git'
git_message = 'Sync issue-diff.py changes.'
db_dir = sync(args.cache_dir, git_repo_url, git_message)
- db_file = os.path.join(db_dir, '{}.yml'.format(args.project))
+ db_file = os.path.join(db_dir, f'{args.project}.yml')

if os.path.exists(db_file):
db = yaml.safe_load(open(db_file).read())
if db is None:
db = {}
else:
- print('Loaded db file: {}'.format(db_file))
+ print(f'Loaded db file: {db_file}')
else:
db = {}

@@ -267,7 +267,7 @@ def main(args):
print_stats(db)
return

- print('Comparing {} against {}'.format(args.project, args.factory))
+ print(f'Comparing {args.project} against {args.factory}')

bugzilla_api = bugzilla_init(args.bugzilla_apiurl)
bugzilla_defaults = (args.bugzilla_product, args.bugzilla_component, args.bugzilla_version)
@@ -280,9 +280,9 @@ def main(args):
shuffle(list(packages))
for index, package in enumerate(packages, start=1):
if index % 50 == 0:
- print('Checked {} of {}'.format(index, len(packages)))
+ print(f'Checked {index} of {len(packages)}')
if package in db and db[package] == 'whitelist':
- print('Skipping package {}'.format(package))
+ print(f'Skipping package {package}')
continue

issues_project = issues_get(apiurl, args.project, package, trackers, db)
@@ -299,7 +299,7 @@ def main(args):
if len(missing_from_factory) == 0:
continue

- print('{}: {} missing'.format(package, len(missing_from_factory)))
+ print(f'{package}: {len(missing_from_factory)} missing')

# Generate summaries for issues missing from factory.
changes = {}
@@ -361,12 +361,12 @@ def main(args):
break
except Fault as e:
if 'There is no component named' in e.faultString:
- print('Invalid component {}, fallback to default'.format(meta[1]))
+ print(f'Invalid component {meta[1]}, fallback to default')
meta = (meta[0], bugzilla_defaults[1], meta[2])
elif 'is not a valid username' in e.faultString:
username = e.faultString.split(' ', 3)[2]
cc.remove(username)
- print('Removed invalid username {}'.format(username))
+ print(f'Removed invalid username {username}')
else:
raise e
tries += 1
@@ -389,9 +389,9 @@ def main(args):
yaml.safe_dump(db, outfile, default_flow_style=False, default_style="'")

if notified > 0:
- print('{}: {} notified in bug {}, {} whitelisted'.format(package, notified, bug_id, whitelisted))
+ print(f'{package}: {notified} notified in bug {bug_id}, {whitelisted} whitelisted')
else:
- print('{}: {} whitelisted'.format(package, whitelisted))
+ print(f'{package}: {whitelisted} whitelisted')

if response == 'b':
break

@@ -46,7 +46,7 @@ class LegalAuto(ReviewBot.ReviewBot):
return http_GET(url)
except HTTPError as e:
if 500 <= e.code <= 599:
- self.logger.debug('Retrying {}'.format(url))
+ self.logger.debug(f'Retrying {url}')
time.sleep(1)
return self.retried_GET(url)
raise e
@@ -107,7 +107,7 @@ class LegalAuto(ReviewBot.ReviewBot):
return True
to_review = self.open_reviews.get(self.request_nick(), None)
if to_review:
- self.logger.info("Found {}".format(json.dumps(to_review)))
+ self.logger.info(f"Found {json.dumps(to_review)}")
to_review = to_review or self.create_db_entry(
src_project, src_package, src_rev)
if not to_review:
@@ -117,7 +117,7 @@ class LegalAuto(ReviewBot.ReviewBot):
url = osc.core.makeurl(self.legaldb, ['package', str(pack)])
report = REQ.get(url, headers=self.legaldb_headers).json()
if report.get('priority', 0) != self.request_priority():
- self.logger.debug('Update priority {}'.format(self.request_priority()))
+ self.logger.debug(f'Update priority {self.request_priority()}')
url = osc.core.makeurl(
self.legaldb, ['package', str(pack)], {'priority': self.request_priority()})
REQ.patch(url, headers=self.legaldb_headers)
@@ -149,7 +149,7 @@ class LegalAuto(ReviewBot.ReviewBot):
self.message = "@{} declined the legal report with the following comment: {}".format(
user, comment)
else:
- self.message = "@{} declined the legal report".format(user)
+ self.message = f"@{user} declined the legal report"
return None
return False
# print url, json.dumps(report)
@@ -161,11 +161,11 @@ class LegalAuto(ReviewBot.ReviewBot):
self.message = None
result = super(LegalAuto, self).check_one_request(req)
if result is None and self.message is not None:
- self.logger.debug("Result of {}: {}".format(req.reqid, self.message))
+ self.logger.debug(f"Result of {req.reqid}: {self.message}")
return result

def check_action__default(self, req, a):
- self.logger.error("unhandled request type %s" % a.type)
+ self.logger.error(f"unhandled request type {a.type}")
return True

def prepare_review(self):
@@ -199,11 +199,10 @@ class LegalAuto(ReviewBot.ReviewBot):
# overload as we need to get of the bot_request
def _set_review(self, req, state):
if self.dryrun:
- self.logger.debug("dry setting %s to %s with %s" %
- (req.reqid, state, self.message))
+ self.logger.debug(f"dry setting {req.reqid} to {state} with {self.message}")
return

- self.logger.debug("setting %s to %s" % (req.reqid, state))
+ self.logger.debug(f"setting {req.reqid} to {state}")
osc.core.change_review_state(apiurl=self.apiurl,
reqid=req.reqid, newstate=state,
by_group=self.review_group,
@@ -211,7 +210,7 @@ class LegalAuto(ReviewBot.ReviewBot):
self.delete_from_db(req.reqid)

def update_project(self, project):
- yaml_path = os.path.join(CacheManager.directory('legal-auto'), '{}.yaml'.format(project))
+ yaml_path = os.path.join(CacheManager.directory('legal-auto'), f'{project}.yaml')
try:
with open(yaml_path, 'r') as file:
self.pkg_cache = yaml.load(file, Loader=yaml.SafeLoader)
@@ -256,7 +255,7 @@ class LegalAuto(ReviewBot.ReviewBot):
if match and match.group(1) == package:
lpackage = package
if package != lpackage:
- self.logger.info("SKIP {}, it links to {}".format(package, lpackage))
+ self.logger.info(f"SKIP {package}, it links to {lpackage}")
skip = True
break
if skip:
@@ -282,7 +281,7 @@ class LegalAuto(ReviewBot.ReviewBot):
if 'saved' not in obj:
return None
legaldb_id = obj['saved']['id']
- self.logger.debug("PKG {}/{}[{}]->{} is {}".format(sproject, package, revision, tproject, legaldb_id))
+ self.logger.debug(f"PKG {sproject}/{package}[{revision}]->{tproject} is {legaldb_id}")
self.pkg_cache[package] = {revision: legaldb_id}
if obj['saved']['state'] == 'obsolete':
url = osc.core.makeurl(self.legaldb, ['packages', 'import', str(legaldb_id)], {

@@ -20,11 +20,11 @@ class MaintInstCheck(ReviewBot.ReviewBot):

def repository_check(self, repository_pairs, archs):
project, repository = repository_pairs[0]
- self.logger.info('checking {}/{}'.format(project, repository))
+ self.logger.info(f'checking {project}/{repository}')

if not len(archs):
self.logger.debug(
- '{} has no relevant architectures'.format(project))
+ f'{project} has no relevant architectures')
return

for arch in archs:
@@ -36,7 +36,7 @@ class MaintInstCheck(ReviewBot.ReviewBot):
parts = installcheck(directories, arch, [], [])
if len(parts):
self.comment.append(
- '## {}/{}\n'.format(repository_pairs[0][1], arch))
+ f'## {repository_pairs[0][1]}/{arch}\n')
self.comment.extend(parts)

return len(self.comment) == 0
@@ -71,7 +71,7 @@ class MaintInstCheck(ReviewBot.ReviewBot):
# targeting multiple projects such as in maintenance workflow in
# which the message should be set by other actions.
self.logger.debug(
- 'skipping review of action targeting {}'.format(action.tgt_project))
+ f'skipping review of action targeting {action.tgt_project}')
return True

repository = target_config.get('main-repo')

26 metrics.py
@@ -75,7 +75,7 @@ def search_paginated_generator(apiurl, queries=None, **kwargs):
while True:
collection = osc.core.search(apiurl, queries, **kwargs)['request']
if not request_count:
- print('processing {:,} requests'.format(int(collection.get('matches'))))
+ print(f"processing {int(collection.get('matches')):,} requests")

for request in collection.findall('request'):
yield request
@@ -128,7 +128,7 @@ def ingest_requests(api, project):
}
# TODO Total time spent in backlog (ie factory-staging, but excluding when staged).

- staged_first_review = request.xpath('review[contains(@by_project, "{}:Staging:")]'.format(project))
+ staged_first_review = request.xpath(f'review[contains(@by_project, "{project}:Staging:")]')
if len(staged_first_review):
by_project = staged_first_review[0].get('by_project')
request_tags['type'] = 'adi' if api.is_adi_project(by_project) else 'letter'
@@ -143,7 +143,7 @@ def ingest_requests(api, project):
# All letter where whitelisted since no restriction.
request_tags['whitelisted'] = request_tags['type'] == 'letter'

- xpath = 'review[contains(@by_project, "{}:Staging:adi:") and @state="accepted"]/'.format(project)
+ xpath = f'review[contains(@by_project, "{project}:Staging:adi:") and @state="accepted"]/'
xpath += 'history[comment[text() = "ready to accept"]]/@when'
ready_to_accept = request.xpath(xpath)
if len(ready_to_accept):
@@ -169,7 +169,7 @@ def ingest_requests(api, project):

# Staging related reviews.
for number, review in enumerate(
- request.xpath('review[contains(@by_project, "{}:Staging:")]'.format(project)), start=1):
+ request.xpath(f'review[contains(@by_project, "{project}:Staging:")]'), start=1):
staged_at = date_parse(review.get('when'))

project_type = 'adi' if api.is_adi_project(review.get('by_project')) else 'letter'
@@ -196,7 +196,7 @@ def ingest_requests(api, project):
point('total', {'backlog': 1, 'staged': -1}, unselected_at, {'event': 'unselect'}, True)

# No-staging related reviews.
- for review in request.xpath('review[not(contains(@by_project, "{}:Staging:"))]'.format(project)):
+ for review in request.xpath(f'review[not(contains(@by_project, "{project}:Staging:"))]'):
tags = {
# who_added is non-trivial due to openSUSE/open-build-service#3898.
'state': review.get('state'),
@@ -246,9 +246,9 @@ def ingest_requests(api, project):
if priority.text in found:
point('priority', {'count': -1}, final_at, {'level': priority.text}, True)
else:
- print('unable to find priority history entry for {} to {}'.format(request.get('id'), priority.text))
+ print(f"unable to find priority history entry for {request.get('id')} to {priority.text}")

- print('finalizing {:,} points'.format(len(points)))
+ print(f'finalizing {len(points):,} points')
return walk_points(points, project)


@@ -340,7 +340,7 @@ def walk_points(points, target):
def ingest_release_schedule(project):
points = []
release_schedule = {}
- release_schedule_file = os.path.join(SOURCE_DIR, 'metrics/annotation/{}.yaml'.format(project))
+ release_schedule_file = os.path.join(SOURCE_DIR, f'metrics/annotation/{project}.yaml')
if project.endswith('Factory'):
# TODO Pending resolution to #1250 regarding deployment.
return 0
@@ -350,7 +350,7 @@ def ingest_release_schedule(project):
'grep -oP "' + r'Changes\.\K\d{5,}' + '"'
snapshots = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE).communicate()[0]
for date in snapshots.split():
- release_schedule[datetime.strptime(date, '%Y%m%d')] = 'Snapshot {}'.format(date)
+ release_schedule[datetime.strptime(date, '%Y%m%d')] = f'Snapshot {date}'
elif os.path.isfile(release_schedule_file):
# Load release schedule for non-rolling releases from yaml file.
with open(release_schedule_file, 'r') as stream:
@@ -520,13 +520,13 @@ def ingest_dashboard(api):
for filename in filenames:
content = dashboard_at_changed(api, filename, revision)
if content:
- map_func = globals()['ingest_dashboard_{}'.format(filename)]
+ map_func = globals()[f'ingest_dashboard_{filename}']
fields = map_func(content)
if not len(fields):
continue

points.append({
- 'measurement': 'dashboard_{}'.format(filename),
+ 'measurement': f'dashboard_{filename}',
'fields': fields,
'time': time,
})
@@ -548,7 +548,7 @@ def ingest_dashboard(api):
client.write_points(points, 's')
count += len(points)

- print('last revision processed: {}'.format(revision if len(index) else 'none'))
+ print(f"last revision processed: {revision if len(index) else 'none'}")

return count

@@ -581,7 +581,7 @@ def main(args):
Config(apiurl, args.project)
api = StagingAPI(apiurl, args.project)

- print('dashboard: wrote {:,} points'.format(ingest_dashboard(api)))
+ print(f'dashboard: wrote {ingest_dashboard(api):,} points')

global who_workaround_swap, who_workaround_miss
who_workaround_swap = who_workaround_miss = 0

@@ -9,7 +9,7 @@ BASEURL = 'http://review.tumbleweed.boombatower.com/data/'


def data_load(name):
- response = requests.get(urljoin(BASEURL, '{}.yaml'.format(name)))
+ response = requests.get(urljoin(BASEURL, f'{name}.yaml'))
return yaml.safe_load(response.text)


@@ -21,8 +21,8 @@ def data_write(client, measurement, points):
def ingest_data(client, name):
data = data_load(name)

- measurement = 'release_{}'.format(name)
- map_func = globals()['map_{}'.format(name)]
+ measurement = f'release_{name}'
+ map_func = globals()[f'map_{name}']
points = []
for release, details in data.items():
points.append({
@@ -32,7 +32,7 @@ def ingest_data(client, name):
})

data_write(client, measurement, points)
- print('wrote {} for {}'.format(len(points), name))
+ print(f'wrote {len(points)} for {name}')


def map_bug(bugs):
|
@@ -105,9 +105,9 @@ class OpenQABot(ReviewBot.ReviewBot):

 def is_incident_in_testing(self, incident):
 # hard coded for now as we only run this code for SUSE Maintenance workflow
-project = 'SUSE:Maintenance:{}'.format(incident)
+project = f'SUSE:Maintenance:{incident}'

-xpath = "(state/@name='review') and (action/source/@project='{}' and action/@type='maintenance_release')".format(project)
+xpath = f"(state/@name='review') and (action/source/@project='{project}' and action/@type='maintenance_release')"
 res = osc.core.search(self.apiurl, request=xpath)['request']
 # return the one and only (or None)
 return res.find('request')
@@ -117,7 +117,7 @@ class OpenQABot(ReviewBot.ReviewBot):
 get incident numbers from SUSE:Maintenance:Test project
 returns dict with openQA var name : string with numbers
 """
-self.logger.debug("calculate_incidents: {}".format(pformat(incidents)))
+self.logger.debug(f"calculate_incidents: {pformat(incidents)}")
 l_incidents = []
 for kind, prj in incidents.items():
 packages = osc.core.meta_get_packagelist(self.apiurl, prj)
@@ -137,13 +137,13 @@ class OpenQABot(ReviewBot.ReviewBot):
 src_prjs = {a.src_project for a in req_.actions}
 if SUSEUpdate.kgraft_target(self.apiurl, src_prjs.pop()):
 self.logger.debug(
-"calculate_incidents: Incident is kgraft - {} ".format(incident))
+f"calculate_incidents: Incident is kgraft - {incident} ")
 continue

 incidents.append(incident)

 l_incidents.append((kind + '_TEST_ISSUES', ','.join(incidents)))
-self.logger.debug("Calculate incidents:{}".format(pformat(l_incidents)))
+self.logger.debug(f"Calculate incidents:{pformat(l_incidents)}")
 return l_incidents

 def jobs_for_target(self, data, build=None):
@@ -160,7 +160,7 @@ class OpenQABot(ReviewBot.ReviewBot):
 values['build'] = build
 else:
 values['test'] = data['test']
-self.logger.debug("Get jobs: {}".format(pformat(values)))
+self.logger.debug(f"Get jobs: {pformat(values)}")
 return self.openqa.openqa_request('GET', 'jobs', values)['jobs']

 # we don't know the current BUILD and querying all jobs is too expensive
@@ -173,7 +173,7 @@ class OpenQABot(ReviewBot.ReviewBot):
 try:
 repohash = self.calculate_repo_hash(data['repos'], self.incident_repos.get(prj, {}))
 except HTTPError as e:
-self.logger.debug("REPOHASH not calculated with response {}".format(e))
+self.logger.debug(f"REPOHASH not calculated with response {e}")
 return

 buildnr = None
@@ -202,7 +202,7 @@ class OpenQABot(ReviewBot.ReviewBot):
 except ValueError:
 continue

-buildnr = "{!s}-{:d}".format(today, buildnr + 1)
+buildnr = f"{today!s}-{buildnr + 1:d}"

 s = data['settings']
 # now schedule it for real
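Conversions like `!s` and format specs like `:d` transfer verbatim: the conversion follows the expression after `!`, the spec after `:`. A sketch with assumed values for today and buildnr:

from datetime import date

today = date(2023, 1, 2)  # assumed; the real code derives this elsewhere
buildnr = 6               # assumed previous build number
assert "{!s}-{:d}".format(today, buildnr + 1) == f"{today!s}-{buildnr + 1:d}" == '2023-01-02-7'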
@@ -212,10 +212,10 @@ class OpenQABot(ReviewBot.ReviewBot):
 s['BUILD'] = buildnr
 s['REPOHASH'] = repohash
 s['_OBSOLETE'] = '1'
-self.logger.debug("Prepared: {}".format(pformat(s)))
+self.logger.debug(f"Prepared: {pformat(s)}")
 if not self.dryrun:
 try:
-self.logger.info("Openqa isos POST {}".format(pformat(s)))
+self.logger.info(f"Openqa isos POST {pformat(s)}")
 self.openqa.openqa_request('POST', 'isos', data=s, retries=1)
 except Exception as e:
 self.logger.error(e)
@@ -250,7 +250,7 @@ class OpenQABot(ReviewBot.ReviewBot):
 if incident_id in value.split(','):
 foundissue = True
 if not foundissue:
-self.logger.info("Repo job {} not for {} - ignoring".format(job['id'], incident_id))
+self.logger.info(f"Repo job {job['id']} not for {incident_id} - ignoring")
 return jobs, QA_INPROGRESS
 # print(foundissue, incident_id, json.dumps(job['settings'], indent=4))

@@ -296,14 +296,14 @@ class OpenQABot(ReviewBot.ReviewBot):

 @staticmethod
 def get_step_url(testurl, modulename):
-failurl = testurl + '/modules/{!s}/fails'.format(quote_plus(modulename))
+failurl = testurl + f'/modules/{quote_plus(modulename)!s}/fails'
 fails = requests.get(failurl).json()
 failed_step = fails.get('first_failed_step', 1)
-return "[{!s}]({!s}#step/{!s}/{:d})".format(OpenQABot.emd(modulename), testurl, modulename, failed_step)
+return f"[{OpenQABot.emd(modulename)!s}]({testurl!s}#step/{modulename!s}/{failed_step:d})"

 @staticmethod
 def job_test_name(job):
-return "{!s}@{!s}".format(OpenQABot.emd(job['settings']['TEST']), OpenQABot.emd(job['settings']['MACHINE']))
+return f"{OpenQABot.emd(job['settings']['TEST'])!s}@{OpenQABot.emd(job['settings']['MACHINE'])!s}"

 def summarize_one_openqa_job(self, job):
 testurl = osc.core.makeurl(self.openqa.baseurl, ['tests', str(job['id'])])
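One constraint shapes several of these rewrites: before Python 3.12, the expression inside an f-string may not reuse the f-string's own quote character, so lines whose expressions subscript with single-quoted keys switch the outer string to double quotes. For instance:

job = {'settings': {'TEST': 'kde', 'MACHINE': '64bit'}}  # minimal stand-in for an openQA job dict
# single quotes inside the braces, double quotes outside: valid on pre-3.12 Python
label = f"{job['settings']['TEST']}@{job['settings']['MACHINE']}"
assert label == 'kde@64bit'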
@@ -311,7 +311,7 @@ class OpenQABot(ReviewBot.ReviewBot):
 rstring = job['result']
 if rstring == 'none':
 return None
-return '\n- [{!s}]({!s}) is {!s}'.format(self.job_test_name(job), testurl, rstring)
+return f'\n- [{self.job_test_name(job)!s}]({testurl!s}) is {rstring!s}'

 modstrings = []
 for module in job['modules']:
@@ -320,15 +320,15 @@ class OpenQABot(ReviewBot.ReviewBot):
 modstrings.append(self.get_step_url(testurl, module['name']))

 if modstrings:
-return '\n- [{!s}]({!s}) failed in {!s}'.format(self.job_test_name(job), testurl, ','.join(modstrings))
+return f"\n- [{self.job_test_name(job)!s}]({testurl!s}) failed in {','.join(modstrings)!s}"
 elif job['result'] == 'failed': # rare case: fail without module fails
-return '\n- [{!s}]({!s}) failed'.format(self.job_test_name(job), testurl)
+return f'\n- [{self.job_test_name(job)!s}]({testurl!s}) failed'
 return ''

 def summarize_openqa_jobs(self, jobs):
 groups = {}
 for job in jobs:
-gl = "{!s}@{!s}".format(self.emd(job['group']), self.emd(job['settings']['FLAVOR']))
+gl = f"{self.emd(job['group'])!s}@{self.emd(job['settings']['FLAVOR'])!s}"
 if gl not in groups:
 groupurl = osc.core.makeurl(self.openqa.baseurl, ['tests', 'overview'],
 {'version': job['settings']['VERSION'],
@@ -337,7 +337,7 @@ class OpenQABot(ReviewBot.ReviewBot):
 'distri': job['settings']['DISTRI'],
 'build': job['settings']['BUILD'],
 })
-groups[gl] = {'title': "__Group [{!s}]({!s})__\n".format(gl, groupurl),
+groups[gl] = {'title': f"__Group [{gl!s}]({groupurl!s})__\n",
 'passed': 0, 'unfinished': 0, 'failed': []}

 job_summary = self.summarize_one_openqa_job(job)
@@ -360,11 +360,11 @@ class OpenQABot(ReviewBot.ReviewBot):
 msg += "\n\n" + groups[group]['title']
 infos = []
 if groups[group]['passed']:
-infos.append("{:d} tests passed".format(groups[group]['passed']))
+infos.append(f"{groups[group]['passed']:d} tests passed")
 if len(groups[group]['failed']):
-infos.append("{:d} tests failed".format(len(groups[group]['failed'])))
+infos.append(f"{len(groups[group]['failed']):d} tests failed")
 if groups[group]['unfinished']:
-infos.append("{:d} unfinished tests".format(groups[group]['unfinished']))
+infos.append(f"{groups[group]['unfinished']:d} unfinished tests")
 msg += "(" + ', '.join(infos) + ")\n"
 for fail in groups[group]['failed']:
 msg += fail
@@ -382,7 +382,7 @@ class OpenQABot(ReviewBot.ReviewBot):
 self.comment_write(state='done', message=msg, request=req, result='accepted')
 return True
 else:
-self.logger.debug("request {} waits for build".format(req.reqid))
+self.logger.debug(f"request {req.reqid} waits for build")
 elif qa_state == QA_FAILED or qa_state == QA_PASSED:
 if qa_state == QA_PASSED:
 msg = "openQA tests passed\n"
@@ -477,7 +477,7 @@ class OpenQABot(ReviewBot.ReviewBot):
 else:
 posts += self.check_product_arch(job, product_prefix, pmap, None)

-self.logger.debug("Pmap: {} Posts: {}".format(pmap, posts))
+self.logger.debug(f"Pmap: {pmap} Posts: {posts}")
 return posts

 def incident_openqa_jobs(self, s):
@@ -533,7 +533,7 @@ class OpenQABot(ReviewBot.ReviewBot):
 def check_suse_incidents(self):
 self.wait_for_build = set()
 for inc in requests.get('https://maintenance.suse.de/api/incident/active/').json():
-self.logger.info("Incident number: {}".format(inc))
+self.logger.info(f"Incident number: {inc}")

 mesh_job = requests.get('https://maintenance.suse.de/api/incident/' + inc).json()

@@ -543,18 +543,18 @@ class OpenQABot(ReviewBot.ReviewBot):
 self.test_job(mesh_job['base'])

 def test_job(self, mesh_job):
-self.logger.debug("Called test_job with: {}".format(mesh_job))
+self.logger.debug(f"Called test_job with: {mesh_job}")
 incident_project = str(mesh_job['project'])
 try:
 comment_info = self.find_obs_request_comment(project_name=incident_project)
 except HTTPError as e:
-self.logger.debug("Couldn't load comments - {}".format(e))
+self.logger.debug(f"Couldn't load comments - {e}")
 return
 comment_build = str(comment_info.get('revision', ''))

 openqa_posts = []
 for prod in self.api_map.keys():
-self.logger.debug("{} -- product in apimap".format(prod))
+self.logger.debug(f"{prod} -- product in apimap")
 openqa_posts += self.check_product(mesh_job, prod)
 openqa_jobs = []
 for s in openqa_posts:
@@ -565,13 +565,13 @@ class OpenQABot(ReviewBot.ReviewBot):
 # take the project comment as marker for not posting jobs
 if not len(jobs) and comment_build != str(mesh_job['openqa_build']):
 if self.dryrun:
-self.logger.info('WOULD POST:{}'.format(pformat(json.dumps(s, sort_keys=True))))
+self.logger.info(f'WOULD POST:{pformat(json.dumps(s, sort_keys=True))}')
 else:
-self.logger.info("Posted: {}".format(pformat(json.dumps(s, sort_keys=True))))
+self.logger.info(f"Posted: {pformat(json.dumps(s, sort_keys=True))}")
 self.openqa.openqa_request('POST', 'isos', data=s, retries=1)
 openqa_jobs += self.incident_openqa_jobs(s)
 else:
-self.logger.info("{} got {}".format(pformat(s), len(jobs)))
+self.logger.info(f"{pformat(s)} got {len(jobs)}")
 openqa_jobs += jobs

 self.openqa_jobs[incident_project] = openqa_jobs
@@ -63,6 +63,6 @@ class openSUSEUpdate(Update):
 packages = self.packages(src_prj, dst_prj)
 settings['INSTALL_PACKAGES'] = ' '.join(packages.keys())
 settings['VERIFY_PACKAGE_VERSIONS'] = ' '.join(
-['{} {}-{}'.format(p.name, p.version, p.release) for p in packages.values()])
+[f'{p.name} {p.version}-{p.release}' for p in packages.values()])

 return [settings]
@@ -89,5 +89,5 @@ class SUSEUpdate(Update):
 return []
 settings += self.add_minimal_settings(src_prj, settings[0])
 settings += self.add_kernel_settings(settings[0])
-self.logger.debug("settings are: {}".format(settings))
+self.logger.debug(f"settings are: {settings}")
 return settings
@@ -21,13 +21,13 @@ class Update(object):
 def get_max_revision(self, job):
 repo = self.repo_prefix + '/'
 repo += self.maintenance_project.replace(':', ':/')
-repo += ':/{!s}'.format(job['id'])
+repo += f":/{job['id']!s}"
 max_revision = 0
 for channel in job['channels']:
 crepo = repo + '/' + channel.replace(':', '_')
 xml = requests.get(crepo + '/repodata/repomd.xml')
 if not xml.ok:
-self.logger.info("{} skipped .. need wait".format(crepo))
+self.logger.info(f"{crepo} skipped .. need wait")
 # if one fails, we skip it and wait
 return False
 root = ET.fromstring(bytes(xml.text, encoding='utf-8'))
@@ -45,7 +45,7 @@ class Update(object):
 s['BUILD'] = ':' + build
 name = self.incident_name(src_prj)
 repo = dst_prj.replace(':', '_')
-repo = '{!s}/{!s}/{!s}/'.format(self.repo_prefix, src_prj.replace(':', ':/'), repo)
+repo = f"{self.repo_prefix!s}/{src_prj.replace(':', ':/')!s}/{repo!s}/"
 patch_id = self.patch_id(repo)
 if not patch_id and self.opensuse:
 # hot fix for openSUSE
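Arbitrary expressions, method calls included, are legal inside the braces, which is how `src_prj.replace(':', ':/')` moves straight into the f-string above. A sketch with assumed values (the names below are illustrative, not taken from the code):

repo_prefix = 'http://download.suse.de/ibs'  # assumed prefix for illustration
src_prj = 'SUSE:Maintenance:1234'            # assumed incident project
repo = 'SUSE_Updates'                        # assumed repository name
url = f"{repo_prefix!s}/{src_prj.replace(':', ':/')!s}/{repo!s}/"
assert url == 'http://download.suse.de/ibs/SUSE:/Maintenance:/1234/SUSE_Updates/'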
@@ -86,7 +86,7 @@ class OriginManager(ReviewBot.ReviewBot):
 def check_source_submission(self, src_project, src_package, src_rev, tgt_project, tgt_package) -> Optional[bool]:
 kind = package_kind(self.apiurl, tgt_project, tgt_package)
 if not (kind is None or kind == 'source'):
-self.review_messages['accepted'] = 'skipping {} package since not source'.format(kind)
+self.review_messages['accepted'] = f'skipping {kind} package since not source'
 return True

 advance, result = self.config_validate(tgt_project)
@@ -172,7 +172,7 @@ class OriginManager(ReviewBot.ReviewBot):

 override, who = self.devel_project_simulate_check_command(source_project, target_project)
 if override:
-return override, 'change_devel command by {}'.format(who)
+return override, f'change_devel command by {who}'

 return False, None

@@ -244,7 +244,7 @@ class OriginManager(ReviewBot.ReviewBot):
 def policy_result_comment_add(self, project, package, comments):
 message = '\n\n'.join(comments)
 if len(self.request.actions) > 1:
-message = '## {}/{}\n\n{}'.format(project, package, message)
+message = f'## {project}/{package}\n\n{message}'
 suffix = '::'.join([project, package])
 else:
 suffix = None
@@ -28,10 +28,10 @@ def do_cycle(self, subcmd, opts, *args):
 deps = ET.fromstring(get_dependson(apiurl, opts.project, opts.repository, opts.arch, [pkgname]))

 pkg = deps.find('package')
-print("\"%s\"" % pkgname)
+print(f"\"{pkgname}\"")
 for deps in pkg.findall('pkgdep'):
 if deps.text in args:
-print("\"%s\" -> \"%s\"" % (deps.text, pkgname))
+print(f"\"{deps.text}\" -> \"{pkgname}\"")
 except HTTPError:
 # Ignore packages that do not exist
 print("[color=red]")
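%-style formatting converts the same way, and escape sequences in the literal part of an f-string behave as in any string literal, so the escaped quotes stay put. With an assumed package name:

pkgname = 'vim'  # assumed package name for illustration
assert "\"%s\"" % pkgname == f"\"{pkgname}\"" == '"vim"'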
@@ -75,7 +75,7 @@ def do_origin(self, subcmd, opts, *args):
 command = args[0]
 if command not in ['config', 'cron', 'history', 'list', 'package', 'potentials',
 'projects', 'report', 'update']:
-raise oscerr.WrongArgs('Unknown command: {}'.format(command))
+raise oscerr.WrongArgs(f'Unknown command: {command}')
 if command == 'package' and len(args) < 2:
 raise oscerr.WrongArgs('A package must be indicated.')

@@ -93,9 +93,9 @@ def do_origin(self, subcmd, opts, *args):
 raise oscerr.WrongArgs('A project must be indicated.')
 config = config_load(apiurl, opts.project)
 if not config:
-raise oscerr.WrongArgs('OSRT:OriginConfig attribute missing from {}'.format(opts.project))
+raise oscerr.WrongArgs(f'OSRT:OriginConfig attribute missing from {opts.project}')

-function = 'osrt_origin_{}'.format(command)
+function = f'osrt_origin_{command}'
 globals()[function](apiurl, opts, *args[1:])


@@ -118,12 +118,12 @@ def osrt_origin_cron(apiurl, opts, *args):
 if os.path.exists(lookup_path):
 # Update the last accessed time to avoid cache manager culling.
 os.utime(lookup_path, (time.time(), os.stat(lookup_path).st_mtime))
-print('{}<locked> lookup preserved'.format(project))
+print(f'{project}<locked> lookup preserved')
 continue

 # Force update lookup information.
 lookup = osrt_origin_lookup(apiurl, project, force_refresh=True, quiet=True)
-print('{} lookup updated for {} package(s)'.format(project, len(lookup)))
+print(f'{project} lookup updated for {len(lookup)} package(s)')


 def osrt_origin_dump(format, data):
@@ -133,7 +133,7 @@ def osrt_origin_dump(format, data):
 print(yaml.dump(data))
 else:
 if format != 'plain':
-print('unknown format: {}'.format(format), file=sys.stderr)
+print(f'unknown format: {format}', file=sys.stderr)
 return False
 return True

@@ -203,7 +203,7 @@ def osrt_origin_lookup(apiurl, project, force_refresh=False, previous=False, qui

 if not previous and not quiet:
 dt = timedelta(seconds=time.time() - os.stat(lookup_path).st_mtime)
-print('# generated {} ago'.format(dt), file=sys.stderr)
+print(f'# generated {dt} ago', file=sys.stderr)

 return lookup

@@ -353,7 +353,7 @@ def osrt_origin_report(apiurl, opts, *args):
 print(body)

 if opts.mail:
-mail_send(apiurl, opts.project, 'release-list', '{} origin report'.format(opts.project),
+mail_send(apiurl, opts.project, 'release-list', f'{opts.project} origin report',
 body, None, dry=opts.dry)


@@ -369,7 +369,7 @@ def osrt_origin_update(apiurl, opts, *packages):
 packages = osrt_origin_update_packages(apiurl, opts.project)

 for package in packages:
-print('checking for updates to {}/{}...'.format(opts.project, package))
+print(f'checking for updates to {opts.project}/{package}...')

 request_future = origin_update(apiurl, opts.project, package)
 if request_future:
@@ -52,14 +52,14 @@ def do_pcheck(self, subcmd, opts, project):
 continue
 elif sinfo.find('linked') is not None:
 elm = sinfo.find('linked')
-key = '%s/%s' % (elm.get('project'), elm.get('package'))
+key = f"{elm.get('project')}/{elm.get('package')}"
 pmap.setdefault(key, []).append(pkg)
 todo.setdefault(elm.get('project'), []).append(elm.get('package'))
 md5s[pkg] = sinfo.get('verifymd5')
 for prj, pkgs in todo.items():
 sinfos = osc.core.get_project_sourceinfo(apiurl, prj, True, *pkgs)
 for pkg, sinfo in sinfos.items():
-key = '%s/%s' % (prj, pkg)
+key = f'{prj}/{pkg}'
 for p in pmap[key]:
 vmd5 = md5s.pop(p)
 if vmd5 == sinfo.get('verifymd5'):
@@ -74,24 +74,24 @@ def do_pcheck(self, subcmd, opts, project):
 if opts.message:
 message = opts.message
 else:
-message = "Scripted push from {project}".format(project=project)
+message = f"Scripted push from {project}"
 api.create(project=project, package=p, target=prj, message=message)

-overview = 'Overview of project {}'.format(project)
+overview = f'Overview of project {project}'
 print()
 print(overview)
 print('=' * len(overview))
-print('Changed & unsubmitted packages: %d' % len(changed))
+print(f'Changed & unsubmitted packages: {len(changed)}')
 print(', '.join(changed))
 print()
-print('Changed & submitted packages: %d' % len(changeSRed.keys()))
+print(f'Changed & submitted packages: {len(changeSRed.keys())}')
-print(', '.join(['%s(%s)' % (pkg, SR) for pkg, SR in changeSRed.items()]))
+print(', '.join([f'{pkg}({SR})' for pkg, SR in changeSRed.items()]))
 print()
-print('Packages without link: %d' % len(md5s.keys()))
+print(f'Packages without link: {len(md5s.keys())}')
 print(', '.join(md5s.keys()))
 print()
-print('Packages with errors: %d' % len(errors.keys()))
+print(f'Packages with errors: {len(errors.keys())}')
-print('\n'.join(['%s: %s' % (p, err) for p, err in errors.items()]))
+print('\n'.join([f'{p}: {err}' for p, err in errors.items()]))


 class oscapi:
@@ -110,7 +110,7 @@ class oscapi:

 def create(self, project, package, target, message):
 currev = osc.core.get_source_rev(self.apiurl, project, package)['rev']
-print("Creating a request from {project}/{package}".format(project=project, package=package))
+print(f"Creating a request from {project}/{package}")
 query = {'cmd': 'create'}
 url = osc.core.makeurl(self.apiurl, ['request'], query=query)

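Keyword-style `.format(project=project)` calls collapse especially cleanly: the placeholder name already matches a local variable, so the f-string just references it directly and the literal text is unchanged. Sketch with assumed values:

project = 'openSUSE:Factory'  # assumed project name
package = 'osc'               # assumed package name
old = "Creating a request from {project}/{package}".format(project=project, package=package)
assert old == f"Creating a request from {project}/{package}"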
@@ -54,14 +54,14 @@ def _full_project_name(self, project):
 return project

 if project.startswith('Factory'):
-return 'openSUSE:%s' % project
+return f'openSUSE:{project}'

 if project.startswith('SLE') or project.startswith('ALP'):
-return 'SUSE:%s' % project
+return f'SUSE:{project}'

 # If we can't guess, raise a Warning
 if (':' not in project):
-warnings.warn('%s project not recognized.' % project)
+warnings.warn(f'{project} project not recognized.')
 return project


@@ -360,7 +360,7 @@ def do_staging(self, subcmd, opts, *args):
 ):
 min_args, max_args = 0, 0
 else:
-raise oscerr.WrongArgs('Unknown command: %s' % cmd)
+raise oscerr.WrongArgs(f'Unknown command: {cmd}')
 args = clean_args(args)
 if len(args) - 1 < min_args:
 raise oscerr.WrongArgs('Too few arguments.')
@@ -551,7 +551,7 @@ def do_staging(self, subcmd, opts, *args):
 return

 for group, info in sorted(proposal.items()):
-print('Staging {} in {}'.format(group, info['staging']))
+print(f"Staging {group} in {info['staging']}")

 # SelectCommand expects strings.
 request_ids = map(str, info['requests'].keys())
@@ -58,7 +58,7 @@ class PubSubConsumer(object):

 def still_alive(self):
 # output something so gocd doesn't consider it stalled
-self.logger.info('Still alive: {}'.format(datetime.now().time()))
+self.logger.info(f'Still alive: {datetime.now().time()}')
 if self._run_until and time.time() > self._run_until:
 self.stop()
 else:
@@ -61,7 +61,7 @@ class AcceptCommand(object):
 if link['project'] in self.api.rings or link['project'] == self.api.project:
 print(f"delete {link['project']}/{link['package']}")
 delete_package(self.api.apiurl, link['project'], link['package'],
-msg="remove link while accepting delete of {}".format(package))
+msg=f"remove link while accepting delete of {package}")

 def check_request_for_bugowner(self, to_request, package, id):
 url = self.api.makeurl(['request', str(id)])
@@ -103,7 +103,7 @@ class AcceptCommand(object):
 if accept_all_green:
 continue
 if not force:
-print('The project "{}" is not yet acceptable.'.format(project))
+print(f'The project "{project}" is not yet acceptable.')
 return False

 staging_packages[project] = []
@@ -137,7 +137,7 @@ class AcceptCommand(object):

 for req in other_new:
 print(f"Accepting request {req['id']}: {req['package']}")
-change_request_state(self.api.apiurl, str(req['id']), 'accepted', message='Accept to %s' % self.api.project)
+change_request_state(self.api.apiurl, str(req['id']), 'accepted', message=f'Accept to {self.api.project}')

 for project in sorted(staging_packages.keys()):
 print(f'waiting for staging project {project} to be accepted')
@@ -146,7 +146,7 @@ class AcceptCommand(object):
 status = self.api.project_status(project, reload=True)
 if status.get('state') == 'empty':
 break
-print('{} requests still staged - waiting'.format(status.find('staged_requests').get('count')))
+print(f"{status.find('staged_requests').get('count')} requests still staged - waiting")
 time.sleep(1)

 self.api.accept_status_comment(project, staging_packages[project])
@@ -185,7 +185,7 @@ class AcceptCommand(object):
 clean_list = set(pkglist) - set(self.api.cnocleanup_packages)

 for package in clean_list:
-print("[cleanup] deleted %s/%s" % (project, package))
+print(f"[cleanup] deleted {project}/{package}")
 delete_package(self.api.apiurl, project, package, force=True, msg="autocleanup")

 return
@@ -40,12 +40,12 @@ class AdiCommand:
 review.get('request')))
 return
 for check in info.findall('missing_checks/check'):
-print(query_project + ' ' + Fore.MAGENTA + 'missing: {}'.format(check.get('name')))
+print(query_project + ' ' + Fore.MAGENTA + f"missing: {check.get('name')}")
 return
 for check in info.findall('checks/check'):
 state = check.find('state').text
 if state != 'success':
-print(query_project + '{} {} check: {}'.format(Fore.MAGENTA, state, check.get('name')))
+print(query_project + f"{Fore.MAGENTA} {state} check: {check.get('name')}")
 return

 overall_state = info.get('state')
@@ -59,7 +59,7 @@ class AdiCommand:

 ready = []
 for req in info.findall('staged_requests/request'):
-ready.append('{}[{}]'.format(Fore.CYAN + req.get('package') + Fore.RESET, req.get('id')))
+ready.append(f"{Fore.CYAN + req.get('package') + Fore.RESET}[{req.get('id')}]")
 if len(ready):
 print(query_project, Fore.GREEN + 'ready:', ', '.join(ready))

@@ -98,7 +98,7 @@ class AdiCommand:
 request_id = int(request.get('id'))
 target = request.find('./action/target')
 target_package = target.get('package')
-line = '- {} {}{:<30}{}'.format(request_id, Fore.CYAN, target_package, Fore.RESET)
+line = f'- {request_id} {Fore.CYAN}{target_package:<30}{Fore.RESET}'

 message = self.api.ignore_format(request_id)
 if message:
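Width and alignment specs such as `:<30` (left-align, pad to 30 columns) also carry over unchanged, as in the `line = f'- {request_id} ...'` conversion above. A minimal check with an assumed package name:

target_package = 'yast2'  # assumed package name
padded = f'{target_package:<30}'
assert padded == 'yast2' + ' ' * 25 and len(padded) == 30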
@@ -108,7 +108,7 @@ class AdiCommand:
 # Auto-superseding request in adi command
 stage_info, code = self.api.update_superseded_request(request)
 if stage_info:
-print(line + ' ({})'.format(SupersedeCommand.CODE_MAP[code]))
+print(line + f' ({SupersedeCommand.CODE_MAP[code]})')
 continue

 # Only create staging projec the first time a non superseded
@@ -120,7 +120,7 @@ class AdiCommand:
 if not self.api.rq_to_prj(request_id, name):
 return False

-print(line + Fore.GREEN + ' (staged in {})'.format(name) + Fore.RESET)
+print(line + Fore.GREEN + f' (staged in {name})' + Fore.RESET)

 def perform(self, packages, move=False, split=False):
 """
@@ -15,23 +15,23 @@ class CheckCommand(object):
 # Check for superseded requests
 for r in project.findall('obsolete_requests/*'):
 if r.get('state') == 'superseded':
-report.extend(' - Request %s is superseded by %s' % (r.get('id'), r.get('superseded_by')))
+report.extend(f" - Request {r.get('id')} is superseded by {r.get('superseded_by')}")

 # Untracked requests
 for r in project.findall('untracked_requests/*'):
-report.extend(' - Request %s is no tracked but is open for the project' % r.get('id'))
+report.extend(f" - Request {r.get('id')} is no tracked but is open for the project")

 # Status of obsolete requests
 for r in project.findall('obsolete_requests/*'):
 if r.get('state') == 'superseded':
 continue
-report.append(' - %s: %s' % (r.get('package'), r.get('state')))
+report.append(f" - {r.get('package')}: {r.get('state')}")
 if not verbose:
 break

 # Missing reviews
 for r in project.findall('missing_reviews/review'):
-report.append(' - %s: Missing reviews: %s' % (r.get('package'), self.api.format_review(r)))
+report.append(f" - {r.get('package')}: Missing reviews: {self.api.format_review(r)}")
 if not verbose:
 break

@@ -39,7 +39,7 @@ class CheckCommand(object):
 if project.find('building_repositories/repo') is not None:
 report.append(' - At least following repositories are still building:')
 for r in project.findall('building_repositories/*'):
-report.append(' %s/%s: %s' % (r.get('repository'), r.get('arch'), r.get('state')))
+report.append(f" {r.get('repository')}/{r.get('arch')}: {r.get('state')}")
 if not verbose:
 break

@@ -47,7 +47,7 @@ class CheckCommand(object):
 if project.find('broken_packages/package') is not None:
 report.append(' - Following packages are broken:')
 for r in project.findall('broken_packages/package'):
-report.append(' %s (%s): %s' % (r.get('package'), r.get('repository'), r.get('state')))
+report.append(f" {r.get('package')} ({r.get('repository')}): {r.get('state')}")
 if not verbose:
 break

@@ -58,7 +58,7 @@ class CheckCommand(object):
 for check in project.findall('checks/*'):
 state = check.find('state').text
 if state != 'success':
-info = " - %s check: %s" % (state, check.get('name'))
+info = f" - {state} check: {check.get('name')}"
 url = check.find('url')
 if url is not None:
 info += " " + url.text
@@ -66,7 +66,7 @@ class CheckCommand(object):
 break

 if project.get('state') == 'acceptable':
-report.insert(0, ' ++ Acceptable staging project %s' % project.get('name'))
+report.insert(0, f" ++ Acceptable staging project {project.get('name')}")
 elif project.get('state') != 'empty':
 report.insert(0, ' -- %s Project %s still needs attention' % (project.get('state').upper(),
 project.get('name')))
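The final `report.insert` stays on %-formatting, presumably because its argument tuple spans two lines and the f-string would not fit one line. Where the operands do fit, `%s` fields map to bare `{}` fields with identical output; a sketch with a dict standing in for the ElementTree element:

r = {'package': 'vim', 'state': 'declined'}  # assumed stand-in; the real code reads XML attributes
assert ' - %s: %s' % (r['package'], r['state']) == f" - {r['package']}: {r['state']}"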
@@ -35,7 +35,7 @@ class CleanupRings(object):

 def perform(self):
 for index, ring in enumerate(self.api.rings):
-print('# {}'.format(ring))
+print(f'# {ring}')
 ring_next = self.api.rings[index + 1] if index + 1 < len(self.api.rings) else None
 self.check_depinfo_ring(ring, ring_next)

@@ -53,31 +53,31 @@ class CleanupRings(object):
 links = si.findall('linked')
 pkg = si.get('package')
 if links is None or len(links) == 0:
-print('# {} not a link'.format(pkg))
+print(f'# {pkg} not a link')
 else:
 linked = links[0]
 dprj = linked.get('project')
 dpkg = linked.get('package')
 if dprj != self.api.project:
 if not dprj.startswith(self.api.crings):
-print("#{} not linking to base {} but {}".format(pkg, self.api.project, dprj))
+print(f"#{pkg} not linking to base {self.api.project} but {dprj}")
 self.links[pkg] = dpkg
 # multi spec package must link to ring
 elif len(links) > 1:
 mainpkg = links[1].get('package')
 mainprj = links[1].get('project')
 if mainprj != self.api.project:
-print('# FIXME: {} links to {}'.format(pkg, mainprj))
+print(f'# FIXME: {pkg} links to {mainprj}')
 else:
 destring = None
 if mainpkg in self.api.ring_packages:
 destring = self.api.ring_packages[mainpkg]
 if not destring:
-print('# {} links to {} but is not in a ring'.format(pkg, mainpkg))
+print(f'# {pkg} links to {mainpkg} but is not in a ring')
-print("osc linkpac {}/{} {}/{}".format(mainprj, mainpkg, prj, mainpkg))
+print(f"osc linkpac {mainprj}/{mainpkg} {prj}/{mainpkg}")
 else:
 if pkg != 'glibc.i686': # FIXME: ugly exception
-print("osc linkpac -f {}/{} {}/{}".format(destring, mainpkg, prj, pkg))
+print(f"osc linkpac -f {destring}/{mainpkg} {prj}/{pkg}")
 self.links[pkg] = mainpkg

 def fill_pkginfo(self, prj, repo, arch):
@@ -94,7 +94,7 @@ class CleanupRings(object):
 if self.bin2src[subpkg] == name:
 # different archs
 continue
-print('# Binary {} is defined twice: {} {}+{}'.format(subpkg, prj, name, self.bin2src[subpkg]))
+print(f'# Binary {subpkg} is defined twice: {prj} {name}+{self.bin2src[subpkg]}')
 self.bin2src[subpkg] = name

 def repo_state_acceptable(self, project):
@@ -103,7 +103,7 @@ class CleanupRings(object):
 for repo in root.findall('result'):
 repostate = repo.get('state', 'missing')
 if repostate not in ['unpublished', 'published'] or repo.get('dirty', 'false') == 'true':
-print('Repo {}/{} is in state {}'.format(repo.get('project'), repo.get('repository'), repostate))
+print(f"Repo {repo.get('project')}/{repo.get('repository')} is in state {repostate}")
 return False
 for package in repo.findall('status'):
 code = package.get('code')
@@ -121,16 +121,16 @@ class CleanupRings(object):
 url = makeurl(self.api.apiurl, ['build', project, 'images', arch, dvd, '_buildinfo'])
 root = ET.parse(http_GET(url)).getroot()
 # Don't delete the image itself
-self.pkgdeps[dvd.split(':')[0]] = 'MYdvd{}'.format(self.api.rings.index(project))
+self.pkgdeps[dvd.split(':')[0]] = f'MYdvd{self.api.rings.index(project)}'
 for bdep in root.findall('bdep'):
 if 'name' not in bdep.attrib:
 continue
 b = bdep.attrib['name']
 if b not in self.bin2src:
-print("{} not found in bin2src".format(b))
+print(f"{b} not found in bin2src")
 continue
 b = self.bin2src[b]
-self.pkgdeps[b] = 'MYdvd{}'.format(self.api.rings.index(project))
+self.pkgdeps[b] = f'MYdvd{self.api.rings.index(project)}'

 def check_buildconfig(self, project):
 url = makeurl(self.api.apiurl, ['build', project, 'standard', '_buildconfig'])
@@ -260,6 +260,6 @@ class CleanupRings(object):
 if ":" in source:
 self.commands.append(f"# Multibuild flavor {source} not needed")
 else:
-self.commands.append('osc rdelete -m cleanup {} {}'.format(prj, source))
+self.commands.append(f'osc rdelete -m cleanup {prj} {source}')
 if nextprj:
-self.commands.append('osc linkpac {} {} {}'.format(self.api.project, source, nextprj))
+self.commands.append(f'osc linkpac {self.api.project} {source} {nextprj}')
@@ -173,7 +173,7 @@ class CommentAPI(object):
 for key, value in info.items():
 infos.append('='.join((str(key), str(value))))

-marker = '<!-- {}{} -->'.format(bot, ' ' + ' '.join(infos) if info else '')
+marker = f"<!-- {bot}{' ' + ' '.join(infos) if info else ''} -->"
 return marker + '\n\n' + comment

 def remove_marker(self, comment):
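The marker rewrite above shows that a whole conditional expression can sit inside a single replacement field. A sketch with assumed bot and info values:

bot = 'repo-checker'      # assumed bot name
info = {'state': 'done'}  # assumed marker info
infos = ['state=done']
marker = f"<!-- {bot}{' ' + ' '.join(infos) if info else ''} -->"
assert marker == '<!-- repo-checker state=done -->'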
@@ -77,7 +77,7 @@ def get_request_list_with_history(
 xpath = ''
 if 'all' not in req_state:
 for state in req_state:
-xpath = xpath_join(xpath, 'state/@name=\'%s\'' % state, inner=True)
+xpath = xpath_join(xpath, f'state/@name=\'{state}\'', inner=True)
 if req_who:
 xpath = xpath_join(xpath, '(state/@who=\'%(who)s\' or history/@who=\'%(who)s\')' % {'who': req_who}, op='and')

@@ -95,12 +95,12 @@ def get_request_list_with_history(
 xpath = xpath_join(xpath, xpath_base % {'kind': kind, 'val': val}, op='and', nexpr_parentheses=True)

 if req_type:
-xpath = xpath_join(xpath, 'action/@type=\'%s\'' % req_type, op='and')
+xpath = xpath_join(xpath, f'action/@type=\'{req_type}\'', op='and')
 for i in exclude_target_projects:
-xpath = xpath_join(xpath, '(not(action/target/@project=\'%(prj)s\'))' % {'prj': i}, op='and')
+xpath = xpath_join(xpath, f'(not(action/target/@project=\'{i}\'))', op='and')

 if conf.config['verbose'] > 1:
-print('[ %s ]' % xpath)
+print(f'[ {xpath} ]')
 queries = {}
 queries['request'] = {'withfullhistory': '1'}
 res = osc_core_search(apiurl, queries=queries, request=xpath)
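These xpath rewrites rely on escape sequences like `\'` being allowed in the literal part of an f-string; the pre-3.12 backslash restriction applies only between the braces. For example, with an assumed state:

state = 'new'  # assumed request state
assert f'state/@name=\'{state}\'' == "state/@name='new'"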
@ -175,9 +175,9 @@ def project_role_expand(apiurl, project, role='maintainer'):
|
|||||||
|
|
||||||
|
|
||||||
def meta_role_expand(apiurl, meta, role='maintainer'):
|
def meta_role_expand(apiurl, meta, role='maintainer'):
|
||||||
users = meta.xpath('//person[@role="{}"]/@userid'.format(role))
|
users = meta.xpath(f'//person[@role="{role}"]/@userid')
|
||||||
|
|
||||||
groups = meta.xpath('//group[@role="{}"]/@groupid'.format(role))
|
groups = meta.xpath(f'//group[@role="{role}"]/@groupid')
|
||||||
users.extend(groups_members(apiurl, groups))
|
users.extend(groups_members(apiurl, groups))
|
||||||
|
|
||||||
return users
|
return users
|
||||||
@ -200,7 +200,7 @@ def package_list(apiurl, project, expand=True):
|
|||||||
@memoize(session=True)
|
@memoize(session=True)
|
||||||
def target_archs(apiurl, project, repository='standard'):
|
def target_archs(apiurl, project, repository='standard'):
|
||||||
meta = ET.fromstringlist(show_project_meta(apiurl, project))
|
meta = ET.fromstringlist(show_project_meta(apiurl, project))
|
||||||
return meta.xpath('repository[@name="{}"]/arch/text()'.format(repository))
|
return meta.xpath(f'repository[@name="{repository}"]/arch/text()')
|
||||||
|
|
||||||
|
|
||||||
@memoize(session=True)
|
@memoize(session=True)
|
||||||
@ -323,7 +323,7 @@ def devel_project_fallback(apiurl, target_project, target_package):
|
|||||||
def devel_projects(apiurl, project):
|
def devel_projects(apiurl, project):
|
||||||
devel_projects = set()
|
devel_projects = set()
|
||||||
|
|
||||||
root = search(apiurl, 'package', "@project='{}' and devel/@project!=''".format(project))
|
root = search(apiurl, 'package', f"@project='{project}' and devel/@project!=''")
|
||||||
for devel_project in root.xpath('package/devel/@project'):
|
for devel_project in root.xpath('package/devel/@project'):
|
||||||
if devel_project != project:
|
if devel_project != project:
|
||||||
devel_projects.add(devel_project)
|
devel_projects.add(devel_project)
|
||||||
@ -345,7 +345,7 @@ def request_age(request):
|
|||||||
|
|
||||||
def project_list_prefix(apiurl, prefix):
|
def project_list_prefix(apiurl, prefix):
|
||||||
"""Get a list of project with the same prefix."""
|
"""Get a list of project with the same prefix."""
|
||||||
query = {'match': 'starts-with(@name, "{}")'.format(prefix)}
|
query = {'match': f'starts-with(@name, "{prefix}")'}
|
||||||
url = makeurl(apiurl, ['search', 'project', 'id'], query)
|
url = makeurl(apiurl, ['search', 'project', 'id'], query)
|
||||||
root = ET.parse(http_GET(url)).getroot()
|
root = ET.parse(http_GET(url)).getroot()
|
||||||
return root.xpath('project/@name')
|
return root.xpath('project/@name')
|
||||||
@ -401,7 +401,7 @@ def entity_email(apiurl, key, entity_type='person', include_name=False):
|
|||||||
|
|
||||||
realname = root.find('realname')
|
realname = root.find('realname')
|
||||||
if include_name and realname is not None:
|
if include_name and realname is not None:
|
||||||
email = '{} <{}>'.format(realname.text, email)
|
email = f'{realname.text} <{email}>'
|
||||||
|
|
||||||
return email
|
return email
|
||||||
|
|
||||||
@ -506,8 +506,8 @@ def attribute_value_load(
|
|||||||
|
|
||||||
raise e
|
raise e
|
||||||
|
|
||||||
xpath_base = './attribute[@namespace="{}" and @name="{}"]'.format(namespace, name)
|
xpath_base = f'./attribute[@namespace="{namespace}" and @name="{name}"]'
|
||||||
value = root.xpath('{}/value/text()'.format(xpath_base))
|
value = root.xpath(f'{xpath_base}/value/text()')
|
||||||
if not len(value):
|
if not len(value):
|
||||||
if root.xpath(xpath_base):
|
if root.xpath(xpath_base):
|
||||||
# Handle boolean attributes that are present, but have no value.
|
# Handle boolean attributes that are present, but have no value.
|
||||||
@@ -563,7 +563,7 @@ def repository_path_expand(apiurl: str, project: str, repo: str, visited_repos:
         visited_repos = set()
     repos = [[project, repo]]
     meta = ET.fromstringlist(show_project_meta(apiurl, project))
-    paths = meta.findall('.//repository[@name="{}"]/path'.format(repo))
+    paths = meta.findall(f'.//repository[@name="{repo}"]/path')

     # The listed paths are taken as-is, except for the last one...
     for path in paths[:-1]:

@@ -595,7 +595,7 @@ def repository_path_search(apiurl, project, search_project, search_repository):
     # Repositories for a single project are in a row so cache parsing.
     root = ET.fromstringlist(show_project_meta(apiurl, project))

-    paths = root.findall('repository[@name="{}"]/path'.format(repository))
+    paths = root.findall(f'repository[@name="{repository}"]/path')
     for path in paths:
         if path.get('project') == search_project and path.get('repository') == search_repository:
             return repository_top

@@ -865,7 +865,7 @@ def package_version(apiurl, project, package):


 def project_attribute_list(apiurl, attribute, locked=None):
-    xpath = 'attribute/@name="{}"'.format(attribute)
+    xpath = f'attribute/@name="{attribute}"'
     root = search(apiurl, 'project', xpath)
     for project in root.xpath('project/@name'):
         # Locked not exposed via OBS xpath engine.

@@ -922,7 +922,7 @@ def project_remote_prefixed(apiurl, apiurl_remote, project):
         if remote_apiurl == apiurl_remote:
             return remote + ':' + project

-    raise Exception('remote APIURL interconnect not configured for{}'.format(apiurl_remote))
+    raise Exception(f'remote APIURL interconnect not configured for{apiurl_remote}')


 def review_find_last(request, user, states=['all']):

@@ -978,7 +978,7 @@ def issue_tracker_by_url(apiurl: str, tracker_url: str) -> Optional[str]:
     if not tracker_url.endswith('/'):
         # All trackers are formatted with trailing slash.
         tracker_url += '/'
-    return next(iter(root.xpath('issue-tracker[url[text()="{}"]]'.format(tracker_url)) or []), None)
+    return next(iter(root.xpath(f'issue-tracker[url[text()="{tracker_url}"]]') or []), None)


 def issue_tracker_label_apply(tracker, identifier):

@@ -987,7 +987,7 @@ def issue_tracker_label_apply(tracker, identifier):

 def request_remote_identifier(apiurl: str, apiurl_remote: str, request_id: str) -> str:
     if apiurl_remote == apiurl:
-        return 'request#{}'.format(request_id)
+        return f'request#{request_id}'

     # The URL differences make this rather convoluted.
     tracker = issue_tracker_by_url(apiurl, apiurl_remote.replace('api.', 'build.'))

@@ -1085,7 +1085,7 @@ def request_action_list_maintenance_incident(apiurl, project, package, states=['
     # included in the search results. Overall, another prime example of design
     # done completely and utterly wrong.

-    package_repository = '{}.{}'.format(package, project.replace(':', '_'))
+    package_repository = f"{package}.{project.replace(':', '_')}"

     # Loop over all maintenance projects and create selectors for the two
     # request states for the given project.
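The new line above also switches the literal from single to double quotes, and that is load-bearing: the replacement field calls project.replace(':', '_'), and before Python 3.12 an f-string expression could not contain the quote character that delimits the f-string itself. A minimal sketch of the constraint, with toy values:

    package = 'curl'
    project = 'openSUSE:Maintenance'

    # Works: the quotes inside the expression differ from the outer ones.
    package_repository = f"{package}.{project.replace(':', '_')}"
    assert package_repository == 'curl.openSUSE_Maintenance'

    # Before Python 3.12 this variant is a SyntaxError:
    # package_repository = f'{package}.{project.replace(':', '_')}'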
@@ -1099,7 +1099,7 @@ def request_action_list_maintenance_incident(apiurl, project, package, states=['

         xpath_project_package = ''
         xpath_project_package = xpath_join(
-            xpath_project_package, 'action/source/@package="{}"'.format(package))
+            xpath_project_package, f'action/source/@package="{package}"')
         xpath_project_package = xpath_join(
             xpath_project_package, 'action/source/@package="{}"'.format(
                 package_repository), op='or', inner=True)

@@ -1117,12 +1117,12 @@ def request_action_list_maintenance_incident(apiurl, project, package, states=['

         xpath = xpath_join(xpath, xpath_project, op='or', nexpr_parentheses=True)

-    xpath = '({})'.format(xpath)
+    xpath = f'({xpath})'

     if 'all' not in states:
         xpath_states = ''
         for state in states:
-            xpath_states = xpath_join(xpath_states, 'state/@name="{}"'.format(state), inner=True)
+            xpath_states = xpath_join(xpath_states, f'state/@name="{state}"', inner=True)
         xpath = xpath_join(xpath, xpath_states, op='and', nexpr_parentheses=True)

     xpath = xpath_join(xpath, 'action/@type="maintenance_incident"', op='and')

@@ -1142,16 +1142,16 @@ def request_action_list_maintenance_incident(apiurl, project, package, states=['


 def request_action_list_maintenance_release(apiurl, project, package, states=['new', 'review']):
-    package_repository = '{}.{}'.format(package, project.replace(':', '_'))
+    package_repository = f"{package}.{project.replace(':', '_')}"

-    xpath = 'action/target/@project="{}"'.format(project)
-    xpath = xpath_join(xpath, 'action/source/@package="{}"'.format(package_repository), op='and', inner=True)
-    xpath = '({})'.format(xpath)
+    xpath = f'action/target/@project="{project}"'
+    xpath = xpath_join(xpath, f'action/source/@package="{package_repository}"', op='and', inner=True)
+    xpath = f'({xpath})'

     if 'all' not in states:
         xpath_states = ''
         for state in states:
-            xpath_states = xpath_join(xpath_states, 'state/@name="{}"'.format(state), inner=True)
+            xpath_states = xpath_join(xpath_states, f'state/@name="{state}"', inner=True)
         xpath = xpath_join(xpath, xpath_states, op='and', nexpr_parentheses=True)

     xpath = xpath_join(xpath, 'action/@type="maintenance_release"', op='and')

@@ -1257,7 +1257,7 @@ def request_create_delete(apiurl, target_project, target_package, message=None):
     def create_function():
         return create_delete_request(apiurl, target_project, target_package, message)

-    return RequestFuture('delete {}/{}'.format(target_project, target_package), create_function)
+    return RequestFuture(f'delete {target_project}/{target_package}', create_function)


 def request_create_change_devel(apiurl, source_project, source_package,

@@ -1363,7 +1363,7 @@ class RequestFuture:
             return None

         request_id = self.create_tolerant()
-        print('{} = {}'.format(self, request_id))
+        print(f'{self} = {request_id}')
         return request_id

     def __str__(self):

@@ -1383,9 +1383,9 @@ def add_description(request, text=None):

 def message_suffix(action, message=None):
     if not message:
-        message = '{} by OSRT tools'.format(action)
+        message = f'{action} by OSRT tools'

-    message += ' (host {})'.format(socket.gethostname())
+    message += f' (host {socket.gethostname()})'
     return message


@@ -33,7 +33,7 @@ class CpioFile(object):
         fields = struct.unpack(fmt, buf[self.off:off])

         if fields[0] != "070701":
-            raise Exception("invalid cpio header %s" % self.c_magic)
+            raise Exception(f"invalid cpio header {self.c_magic}")

         names = ("c_ino", "c_mode", "c_uid", "c_gid",
                  "c_nlink", "c_mtime", "c_filesize",
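The CpioFile change starts from %-formatting rather than str.format; for a single %s placeholder the mapping to an f-string is direct, since both funnel the value through str(). A standalone check (the header value below is a stand-in, not real cpio data):

    c_magic = '070702'
    assert "invalid cpio header %s" % c_magic == f"invalid cpio header {c_magic}"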
@@ -42,7 +42,7 @@ class FreezeCommand(object):
         self.create_bootstrap_aggregate_file()

     def bootstrap_packages(self):
-        url = self.api.makeurl(['build', '{}:0-Bootstrap'.format(self.api.crings), '_result'])
+        url = self.api.makeurl(['build', f'{self.api.crings}:0-Bootstrap', '_result'])
         f = self.api.retried_GET(url)
         root = ET.parse(f).getroot().find('result')
         res = list()

@@ -58,7 +58,7 @@ class FreezeCommand(object):

         root = ET.Element('aggregatelist')
         a = ET.SubElement(root, 'aggregate',
-                          {'project': '{}:0-Bootstrap'.format(self.api.crings)})
+                          {'project': f'{self.api.crings}:0-Bootstrap'})

         for package in self.bootstrap_packages():
             p = ET.SubElement(a, 'package')

@@ -112,7 +112,7 @@ class FreezeCommand(object):
         if self.api.is_adi_project(prj):
             src_prj = self.api.find_devel_project_from_adi_frozenlinks(self.prj)
             if src_prj is None:
-                raise Exception("{} does not have a valid frozenlinks".format(self.prj))
+                raise Exception(f"{self.prj} does not have a valid frozenlinks")
             else:
                 self.api.update_adi_frozenlinks(self.prj, src_prj)
             return

@@ -150,7 +150,7 @@ class FreezeCommand(object):
         root = ET.Element('project', {'name': self.prj})
         ET.SubElement(root, 'title')
         ET.SubElement(root, 'description')
-        links = self.projectlinks or ['{}:1-MinimalX'.format(self.api.crings)]
+        links = self.projectlinks or [f'{self.api.crings}:1-MinimalX']
         for lprj in links:
             ET.SubElement(root, 'link', {'project': lprj})


@@ -22,7 +22,7 @@ def describe(directory=None):
 def clone(url, directory):
     return_code = subprocess.call(['git', 'clone', url, directory])
     if return_code != 0:
-        raise Exception('Failed to clone {}'.format(url))
+        raise Exception(f'Failed to clone {url}')


 def sync(cache_dir, repo_url, message=None):

@@ -54,7 +54,7 @@ def sync(cache_dir, repo_url, message=None):
     os.chdir(repo_dir)
     return_code = subprocess.call([git_sync_exec])
     if return_code != 0:
-        raise Exception('failed to sync {}'.format(repo_name))
+        raise Exception(f'failed to sync {repo_name}')

     os.chdir(cwd)


@@ -15,7 +15,7 @@ class IgnoreCommand(object):
         """

         for request_id in RequestFinder.find_sr(requests, self.api):
-            print('{}: ignored'.format(request_id))
+            print(f'{request_id}: ignored')
             comment = message if message else self.MESSAGE
             self.api.add_ignored_request(request_id, comment)
             self.comment.add_comment(request_id=str(request_id), comment=comment)

@@ -52,7 +52,7 @@ class ListCommand:
         if not hide_source and action.find('source') is not None:
             source_project = action.find('source').get('project')
             source_project = self.project_strip(source_project)
-            line += ' ({})'.format(Fore.YELLOW + source_project + Fore.RESET)
+            line += f' ({Fore.YELLOW + source_project + Fore.RESET})'
         if action.get('type') == 'delete':
             line += ' (' + Fore.RED + 'delete request' + Fore.RESET + ')'


@@ -72,10 +72,10 @@ class ListCommand:
         splitter.stageable = False
         for request_type in ('change_devel', 'set_bugowner'):
             splitter.reset()
-            splitter.filter_add('./action[@type="{}"]'.format(request_type))
+            splitter.filter_add(f'./action[@type="{request_type}"]')
             requests = splitter.filter_only()
             if len(requests):
-                print('\n{} request(s)'.format(request_type))
+                print(f'\n{request_type} request(s)')
                 for request in sorted(requests, key=lambda s: s.get('id')):
                     print(' {} {}'.format(
                         self.api.makeurl(['request', 'show', request.get('id')]),

@@ -131,7 +131,7 @@ def memoize(ttl=None, session=False, add_invalidate=False):
         cache.clear()

     def _add_invalidate_method(_self):
-        name = '_invalidate_%s' % fn.__name__
+        name = f'_invalidate_{fn.__name__}'
         if not hasattr(_self, name):
             setattr(_self, name, _invalidate)


@@ -31,9 +31,9 @@ class OBSLock(object):
         """Create a signature with a timestamp."""
         reason = str(self.reason)
         if self.reason_sub:
-            reason += ' ({})'.format(self.reason_sub)
+            reason += f' ({self.reason_sub})'
         reason = reason.replace('@', 'at').replace('#', 'hash')
-        return '%s#%s@%s' % (self.user, reason, datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%f'))
+        return f"{self.user}#{reason}@{datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%f')}"

     def _parse(self, signature):
         """Parse a signature into an user and a timestamp."""
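The signature hunk above is the densest conversion in the commit, so a note on why it parses: the strftime('...') argument lives inside the replacement field, so its single quotes merely need to differ from the f-string's double quotes (a hard requirement before Python 3.12), and the % characters in the date format codes are ordinary text as far as the f-string is concerned. A self-contained sketch with invented values:

    from datetime import datetime

    user = 'factory-auto'
    reason = 'repo-checker at work'
    signature = f"{user}#{reason}@{datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%f')}"
    print(signature)  # e.g. factory-auto#repo-checker at work@2024-05-01T09:30:00.123456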
@@ -50,7 +50,7 @@ class OBSLock(object):
         return user, reason, reason_sub, ts

     def _read(self):
-        url = makeurl(self.apiurl, ['source', self.lock, '_attribute', '%s:LockedBy' % self.ns])
+        url = makeurl(self.apiurl, ['source', self.lock, '_attribute', f'{self.ns}:LockedBy'])
         try:
             root = ET.parse(http_GET(url)).getroot()
         except HTTPError as e:

@@ -66,12 +66,12 @@ class OBSLock(object):

     def _write(self, signature):
         url = makeurl(self.apiurl, ['source', self.lock, '_attribute'])
-        data = """
+        data = f"""
         <attributes>
-          <attribute namespace='%s' name='LockedBy'>
-            <value>%s</value>
+          <attribute namespace='{self.ns}' name='LockedBy'>
+            <value>{signature}</value>
           </attribute>
-        </attributes>""" % (self.ns, signature)
+        </attributes>"""
         http_POST(url, data=data)

     def acquire(self):
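The _write hunk is the only triple-quoted template converted in this commit. The f prefix covers the whole multi-line literal, substitution happens once when the assignment runs, and any literal brace in such a template would have to be doubled. A minimal standalone sketch of the same pattern (namespace and signature values are made up):

    ns = 'openSUSE.org'
    signature = 'user#reason@2024-01-01T00:00:00.000000'

    data = f"""
    <attributes>
      <attribute namespace='{ns}' name='LockedBy'>
        <value>{signature}</value>
      </attribute>
    </attributes>"""

    assert f"namespace='{ns}'" in data
    # A literal brace would need doubling: f"{{literal}}" -> "{literal}"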
@@ -88,7 +88,7 @@ class OBSLock(object):
         if user and ts:
             now = datetime.utcnow()
             if now < ts:
-                raise Exception('Lock acquired from the future [%s] by [%s]. Try later.' % (ts, user))
+                raise Exception(f'Lock acquired from the future [{ts}] by [{user}]. Try later.')
             delta = now - ts
             if delta.total_seconds() < self.ttl:
                 # Existing lock that has not expired.

@@ -103,14 +103,14 @@ class OBSLock(object):
                 stop = False

             if stop:
-                print('Lock acquired by [%s] %s ago, reason <%s>. Try later.' % (user, delta, reason))
+                print(f'Lock acquired by [{user}] {delta} ago, reason <{reason}>. Try later.')
                 exit(-1)
         self._write(self._signature())

         time.sleep(1)
         user, _, _, _ = self._parse(self._read())
         if user != self.user:
-            raise Exception('Race condition, [%s] wins. Try later.' % user)
+            raise Exception(f'Race condition, [{user}] wins. Try later.')
         self.locked = True

         return self

@@ -208,7 +208,7 @@ def config_resolve_variables(config, config_project):


 def config_resolve_variable(value, config_project, key='config'):
-    prefix = '<{}:'.format(key)
+    prefix = f'<{key}:'
     end = value.rfind('>')
     if not value.startswith(prefix) or end == -1:
         return value

@@ -488,7 +488,7 @@ def policy_get_preprocess(apiurl, origin, policy):
     project = origin.rstrip('~')
     config_project = Config.get(apiurl, project)
     for suffix in ('', '_update'):
-        key = 'pending_submission_allowed_reviews{}'.format(suffix)
+        key = f'pending_submission_allowed_reviews{suffix}'
         policy[key] = list(filter(None, [
             config_resolve_variable(v, config_project, 'config_source')
             for v in policy[key]]))

@@ -573,7 +573,7 @@ def policy_input_evaluate(policy, inputs) -> PolicyResult:
                 result.reviews['fallback'] = 'Changing to a higher priority origin, but from another family.'
             elif inputs['direction'] != 'forward':
                 result.reviews['fallback'] = \
-                    'Changing to a higher priority origin, but {} direction.'.format(inputs['direction'])
+                    f"Changing to a higher priority origin, but {inputs['direction']} direction."
             else:
                 result.reviews['fallback'] = 'Changing to a lower priority origin.'
         else:

@@ -584,7 +584,7 @@ def policy_input_evaluate(policy, inputs) -> PolicyResult:
             if not policy['automatic_updates']:
                 result.reviews['fallback'] = 'Forward direction, but automatic updates not allowed.'
             else:
-                result.reviews['fallback'] = '{} direction.'.format(inputs['direction'])
+                result.reviews['fallback'] = f"{inputs['direction']} direction."

     if inputs['pending_submission'] is not False:
         reviews_not_allowed = policy_input_evaluate_reviews_not_allowed(policy, inputs)

@@ -822,7 +822,7 @@ def origin_update_pending(apiurl, origin_project, package, target_project, polic
             continue

         identifier = request_remote_identifier(apiurl, apiurl_remote, request.reqid)
-        message = 'Newer pending source available from package origin. See {}.'.format(identifier)
+        message = f'Newer pending source available from package origin. See {identifier}.'
         src_project = project_remote_prefixed(apiurl, apiurl_remote, action.src_project)
         return request_create_submit(apiurl, src_project, action.src_package,
                                      target_project, package, message=message, revision=action.src_rev,

@@ -834,7 +834,7 @@ def origin_update_pending(apiurl, origin_project, package, target_project, polic
 def origin_update_mode(apiurl, target_project, package, policy, origin_project):
     values = {}
     for key in ('skip', 'supersede', 'delay', 'frequency'):
-        attribute = 'OriginUpdate{}'.format(key.capitalize())
+        attribute = f'OriginUpdate{key.capitalize()}'
         for project in (origin_project, target_project):
             for package_attribute in (package, None):
                 value = attribute_value_load(apiurl, project, attribute, package=package_attribute)

@@ -13,7 +13,7 @@ class PrioCommand(object):
         :param project: project to check

         """
-        message = 'raising priority for %s' % status.get('name')
+        message = f"raising priority for {status.get('name')}"
         for r in status.findall('missing_reviews/review'):
             reqid = r.get('request')
             req = osc.core.get_request(self.api.apiurl, reqid)

@@ -16,10 +16,10 @@ class RepairCommand(object):
         req = get_request(self.api.apiurl, reqid)

         if not req:
-            raise oscerr.WrongArgs('Request {} not found'.format(reqid))
+            raise oscerr.WrongArgs(f'Request {reqid} not found')

         if req.state.name != 'review':
-            print('Request "{}" is not in review state'.format(reqid))
+            print(f'Request "{reqid}" is not in review state')
             return

         reviews = [r.by_project for r in req.reviews if ':Staging:' in str(r.by_project) and r.state == 'new']

@@ -27,9 +27,9 @@ class RepairCommand(object):
         if reviews:
             if len(reviews) > 1:
                 raise oscerr.WrongArgs(
-                    'Request {} had multiple review opened by different staging project'.format(reqid))
+                    f'Request {reqid} had multiple review opened by different staging project')
         else:
-            raise oscerr.WrongArgs('Request {} is not for staging project'.format(reqid))
+            raise oscerr.WrongArgs(f'Request {reqid} is not for staging project')

         staging_project = reviews[0]
         try:

@@ -42,15 +42,15 @@ class RepairCommand(object):
         if data is not None:
             for request in data.findall('staged_requests/requests'):
                 if request.get('id') == reqid:
-                    print('Request "{}" had the good setup in "{}"'.format(reqid, staging_project))
+                    print(f'Request "{reqid}" had the good setup in "{staging_project}"')
                     return
         else:
             # this situation should only happen on adi staging
-            print('Project is not exist, re-creating "{}"'.format(staging_project))
+            print(f'Project is not exist, re-creating "{staging_project}"')
             self.api.create_adi_project(staging_project)

         # a bad request setup found
-        print('Repairing "{}"'.format(reqid))
+        print(f'Repairing "{reqid}"')
         change_review_state(self.api.apiurl, reqid, newstate='accepted',
                             message='Re-evaluation needed', by_project=staging_project)
         self.api.add_review(reqid, by_group=self.api.cstaging_group, msg='Requesting new staging review')

@@ -64,7 +64,7 @@ class RepairCommand(object):
         if cleanup:
             untracked = self.api.project_status_requests('untracked')
             if len(untracked) > 0:
-                print('Cleanup {} untracked requests'.format(len(untracked)))
+                print(f'Cleanup {len(untracked)} untracked requests')
                 packages += tuple(untracked)

         for reqid in RequestFinder.find_sr(packages, self.api):

@@ -29,7 +29,7 @@ class CorruptRepos(Exception):


 def _format_pkg(sp):
-    return "{}-{}-{}.{}".format(sp[0], sp[1], sp[2], sp[3])
+    return f"{sp[0]}-{sp[1]}-{sp[2]}.{sp[3]}"


 def _check_exists_in_whitelist(sp, whitelist):

@@ -37,7 +37,7 @@ def _check_exists_in_whitelist(sp, whitelist):
         logger.debug("Found %s in whitelist, ignoring", sp[0])
         return True
     # check with version
-    long_name = "{}-{}".format(sp[0], sp[1])
+    long_name = f"{sp[0]}-{sp[1]}"
     if long_name in whitelist:
         logger.debug("Found %s in whitelist, ignoring", long_name)
         return True

@@ -48,7 +48,7 @@ def _check_exists_in_whitelist(sp, whitelist):


 def _check_colon_format(sp1, sp2, whitelist):
-    if "{}:{}".format(sp1, sp2) in whitelist:
+    if f"{sp1}:{sp2}" in whitelist:
         logger.debug("Found %s:%s in whitelist, ignoring", sp1, sp2)
         return True


@@ -114,9 +114,9 @@ def _fileconflicts(pfile, arch, target_packages, whitelist):
                 logger.debug("Packages %s and %s with conflicting files conflict", pkgcanon1, pkgcanon2)
                 continue

-            output += "found conflict of {} with {}\n".format(_format_pkg(sp1), _format_pkg(sp2))
+            output += f"found conflict of {_format_pkg(sp1)} with {_format_pkg(sp2)}\n"
             for file in conflict['conflicts'].split('\n'):
-                output += " {}\n".format(file)
+                output += f" {file}\n"
             output += "\n"

     if len(output):

@@ -162,7 +162,7 @@ def parsed_installcheck(repos, arch, target_packages, whitelist):
         if package not in target_packages:
             continue
         if package in whitelist:
-            logger.debug("{} fails installcheck but is white listed".format(package))
+            logger.debug(f"{package} fails installcheck but is white listed")
             continue
         reported_problems[package] = {'problem': match.group(
             1) + match.group(2), 'output': [], 'source': target_packages[package]}
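A side observation on the whitelist hunks above: the commit converts the eager logger.debug("{} ...".format(package)) call but leaves the logger.debug("Found %s ...", sp[0]) calls alone, which preserves a real distinction. Passing %s plus arguments defers string building until the record is actually emitted, while an f-string argument is always built up front. A standalone sketch of the difference:

    import logging

    logger = logging.getLogger('installcheck-demo')
    logging.basicConfig(level=logging.INFO)  # DEBUG records get filtered out

    package = 'some-package'

    # Lazy: the message is never formatted, because DEBUG is disabled.
    logger.debug("Found %s in whitelist, ignoring", package)

    # Eager: the f-string is built before logging decides to drop the record.
    logger.debug(f"{package} fails installcheck but is white listed")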
@@ -212,7 +212,7 @@ def installcheck(directories, arch, whitelist, ignore_conflicts):

 def mirrorRepomd(cachedir, url):
     # Use repomd.xml to get the location of primary.xml.*
-    repoindex = ET.fromstring(requests.get('{}/repodata/repomd.xml'.format(url)).content)
+    repoindex = ET.fromstring(requests.get(f'{url}/repodata/repomd.xml').content)
     primarypath = repoindex.xpath("string(./repo:data[@type='primary']/repo:location/@href)",
                                   namespaces={'repo': 'http://linux.duke.edu/metadata/repo'})


@@ -239,18 +239,18 @@ def mirror(apiurl, project, repository, arch):
         os.makedirs(directory)

     meta = ET.parse(http_GET(makeurl(apiurl, ['source', project, '_meta']))).getroot()
-    repotag = meta.xpath("/project/repository[@name='{}']".format(repository))[0]
+    repotag = meta.xpath(f"/project/repository[@name='{repository}']")[0]
     if arch not in repotag.xpath("./arch/text()"):
         # Arch not in this project, skip mirroring
         return directory

-    download = repotag.xpath("./download[@arch='{}']".format(arch))
+    download = repotag.xpath(f"./download[@arch='{arch}']")
     if download is not None and len(download) > 0:
         if len(download) > 1:
             raise Exception('Multiple download urls unsupported')
         repotype = download[0].get('repotype')
         if repotype != 'rpmmd':
-            raise Exception('repotype {} not supported'.format(repotype))
+            raise Exception(f'repotype {repotype} not supported')
         return mirrorRepomd(directory, download[0].get('url'))

     rm = RepoMirror(apiurl)

@@ -109,7 +109,7 @@ class RequestFinder(object):
         :param newcand: the review state of staging-group must be new
         """

-        query = 'types=submit,delete&states=new,review&project={}&view=collection'.format(self.api.project)
+        query = f'types=submit,delete&states=new,review&project={self.api.project}&view=collection'
         url = makeurl(self.api.apiurl, ['request'], query)
         f = http_GET(url)
         root = ET.parse(f).getroot()

@@ -149,7 +149,7 @@ class RequestFinder(object):
                 continue
             if consider_stagings and self.find_staging_project(p):
                 continue
-            raise oscerr.WrongArgs('No SR# found for: {}'.format(p))
+            raise oscerr.WrongArgs(f'No SR# found for: {p}')

     def find_via_stagingapi(self, pkgs):
         """

@@ -173,7 +173,7 @@ class RequestFinder(object):
                     found = True
                     break
             if not found:
-                raise oscerr.WrongArgs('No SR# found for: {}'.format(p))
+                raise oscerr.WrongArgs(f'No SR# found for: {p}')

     def find_staging_project(self, project):
         """

@@ -47,7 +47,7 @@ class RequestSplitter(object):
     def strategy_set(self, name, **kwargs):
         self.reset()

-        class_name = 'Strategy{}'.format(name.lower().title())
+        class_name = f'Strategy{name.lower().title()}'
         cls = globals()[class_name]
         self.strategy = cls(**kwargs)
         self.strategy.apply(self)

@@ -276,7 +276,7 @@ class RequestSplitter(object):

     def requests_assign(self, group, staging, merge=False):
         # Arbitrary, but descriptive group key for proposal.
-        key = '{}#{}@{}'.format(len(self.proposal), self.strategy.key, group)
+        key = f'{len(self.proposal)}#{self.strategy.key}@{group}'
         self.proposal[key] = {
             'bootstrap_required': self.grouped[group]['bootstrap_required'],
             'group': group,

@@ -67,7 +67,7 @@ class SelectCommand(object):

         if request not in staged_requests and not supersede:
             # Normal 'select' command
-            print('Adding request "{}" to project "{}"'.format(request, self.target_project))
+            print(f'Adding request "{request}" to project "{self.target_project}"')

             return self.api.rq_to_prj(request, self.target_project, remove_exclusion)
         elif request in staged_requests and (move or supersede):

@@ -75,17 +75,17 @@ class SelectCommand(object):
             # supersede = (new_rq, package, project)
             fprj = self.api.packages_staged[staged_requests[request]]['prj'] if not supersede else supersede[2]
             if filter_from and filter_from != fprj:
-                print('Ignoring "{}" in "{}" since not in "{}"'.format(request, fprj, filter_from))
+                print(f'Ignoring "{request}" in "{fprj}" since not in "{filter_from}"')
                 return True

             if supersede:
-                print('"{} ({}) is superseded by {}'.format(request, supersede[1], supersede[0]))
+                print(f'"{request} ({supersede[1]}) is superseded by {supersede[0]}')

             if fprj == self.target_project:
-                print('"{}" is currently in "{}"'.format(request, self.target_project))
+                print(f'"{request}" is currently in "{self.target_project}"')
                 return False

-            print('Moving "{}" from "{}" to "{}"'.format(request, fprj, self.target_project))
+            print(f'Moving "{request}" from "{fprj}" to "{self.target_project}"')

             # Store the source project, we also need to write a comment there
             self.affected_projects.add(fprj)

@@ -102,7 +102,7 @@ class SelectCommand(object):
             print(msg)
             return True
         elif supersede:
-            print('"{} ({}) supersedes {}'.format(request, supersede[1], supersede[0]))
+            print(f'"{request} ({supersede[1]}) supersedes {supersede[0]}')
         else:
             raise oscerr.WrongArgs('Arguments for select are not correct.')


@@ -132,7 +132,7 @@ class SelectCommand(object):
         requests = RequestFinder.find_sr(requests, self.api, newcand, consider_stagings=move)
         requests_count = len(requests)
         for index, request in enumerate(requests, start=1):
-            print('({}/{}) '.format(index, requests_count), end='')
+            print(f'({index}/{requests_count}) ', end='')
             if not self.select_request(request, move, filter_from, remove_exclusion=remove_exclusion):
                 return False


@@ -97,8 +97,8 @@ class StagingAPI(object):
         # If the project support rings, inititialize some variables.
         if self.crings:
             self._rings = (
-                '{}:0-Bootstrap'.format(self.crings),
-                '{}:1-MinimalX'.format(self.crings)
+                f'{self.crings}:0-Bootstrap',
+                f'{self.crings}:1-MinimalX'
             )
         else:
             self._rings = []

@@ -167,9 +167,9 @@ class StagingAPI(object):
                 return func(url)
             except HTTPError as e:
                 if 500 <= e.code <= 599:
-                    print('Error {}, retrying {} in {}s'.format(e.code, url, retry_sleep_seconds))
+                    print(f'Error {e.code}, retrying {url} in {retry_sleep_seconds}s')
                 elif e.code == 400 and e.reason == 'service in progress':
-                    print('Service in progress, retrying {} in {}s'.format(url, retry_sleep_seconds))
+                    print(f'Service in progress, retrying {url} in {retry_sleep_seconds}s')
                 else:
                     raise e
                 time.sleep(retry_sleep_seconds)

@@ -327,7 +327,7 @@ class StagingAPI(object):
     def prj_from_short(self, name):
         if name.startswith(self.cstaging):
             return name
-        return '{}:{}'.format(self.cstaging, name)
+        return f'{self.cstaging}:{name}'

     def get_staging_projects_short(self, adi=False):
         """

@@ -381,7 +381,7 @@ class StagingAPI(object):

         req = get_request(self.apiurl, str(request_id))
         if not req:
-            raise oscerr.WrongArgs('Request {} not found'.format(request_id))
+            raise oscerr.WrongArgs(f'Request {request_id} not found')

         for review in req.reviews:
             if review.by_group == by_group and \

@@ -434,7 +434,7 @@ class StagingAPI(object):
         request_id = int(request.get('id'))
         action = request.find('action')
         if action is None:
-            msg = 'Request {} has no action'.format(request_id)
+            msg = f'Request {request_id} has no action'
             raise oscerr.WrongArgs(msg)

         # Where are we targeting the package

@@ -511,7 +511,7 @@ class StagingAPI(object):
         else:
             # Supersedes request is from the same project
             if request_new.find('./action/source').get('project') == request_old.find('./action/source').get('project'):
-                message = 'sr#{} has newer source and is from the same project'.format(request_new.get('id'))
+                message = f"sr#{request_new.get('id')} has newer source and is from the same project"

                 self.rm_from_prj(stage_info['prj'], request_id=stage_info['rq_id'])
                 self.do_change_review_state(stage_info['rq_id'], 'declined',

@@ -587,8 +587,8 @@ class StagingAPI(object):
         requests = []

         # xpath query, using the -m, -r, -s options
-        where = "@by_group='{}' and @state='new'".format(self.cstaging_group)
-        target = "target[@project='{}']".format(self.project)
+        where = f"@by_group='{self.cstaging_group}' and @state='new'"
+        target = f"target[@project='{self.project}']"

         query = {'match': f"state/@name='review' and review[{where}] and {target}"}
         if query_extra is not None:

@@ -763,13 +763,13 @@ class StagingAPI(object):

     def format_review(self, review):
         if review.get('by_group'):
-            return 'group:{}'.format(review.get('by_group'))
+            return f"group:{review.get('by_group')}"
         if review.get('by_user'):
             return review.get('by_user')
         if review.get('by_package'):
-            return 'package:{}'.format(review.get('by_package'))
+            return f"package:{review.get('by_package')}"
         if review.get('by_project'):
-            return 'project:{}'.format(review.get('by_project'))
+            return f"project:{review.get('by_project')}"
         raise oscerr.WrongArgs('Invalid review')

     def job_history_fail_count(self, history):

@@ -806,7 +806,7 @@ class StagingAPI(object):
         if size:
             offset += int(size[0])

-        query = {'nostream': '1', 'start': '%s' % offset}
+        query = {'nostream': '1', 'start': f'{offset}'}
         if last:
             query['last'] = 1
         log = StringIO()
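One equivalence worth spelling out for the last hunk above: an f-string whose whole body is a single bare replacement field is just a verbose str() call, so all three spellings build the same query value. A toy check:

    offset = 4096
    assert f'{offset}' == '%s' % offset == str(offset) == '4096'
    query = {'nostream': '1', 'start': str(offset)}  # equally valid spelling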
@@ -904,7 +904,7 @@ class StagingAPI(object):

         req = get_request(self.apiurl, str(request_id))
         if not req:
-            raise oscerr.WrongArgs('Request {} not found'.format(request_id))
+            raise oscerr.WrongArgs(f'Request {request_id} not found')

         act = req.get_actions('submit')
         if act:

@@ -949,7 +949,7 @@ class StagingAPI(object):
         if '_multibuild' in filelist:
             return []

-        mainspec = "{}{}".format(package, '.spec')
+        mainspec = f"{package}.spec"
         if mainspec in filelist:
             filelist.remove(mainspec)
         for file in filelist:
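The mainspec change is the one conversion here that also simplifies the expression: the old call routed the constant '.spec' through a second placeholder, while the f-string folds it into the literal text. Equivalent forms, shown with a throwaway package name:

    package = 'osc-plugin'
    assert "{}{}".format(package, '.spec') == f"{package}.spec" == package + '.spec'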
@ -1033,7 +1033,7 @@ class StagingAPI(object):
|
|||||||
# dynamically generated and static baselibs.conf.
|
# dynamically generated and static baselibs.conf.
|
||||||
if self.is_adi_project(project):
|
if self.is_adi_project(project):
|
||||||
baselibs = False
|
baselibs = False
|
||||||
specfile = source_file_load(self.apiurl, src_prj, src_pkg, '{}.spec'.format(src_pkg), src_rev)
|
specfile = source_file_load(self.apiurl, src_prj, src_pkg, f'{src_pkg}.spec', src_rev)
|
||||||
if specfile and 'baselibs.conf' in specfile:
|
if specfile and 'baselibs.conf' in specfile:
|
||||||
baselibs = True
|
baselibs = True
|
||||||
else:
|
else:
|
||||||
@ -1047,7 +1047,7 @@ class StagingAPI(object):
|
|||||||
http_PUT(url, data=ET.tostring(root))
|
http_PUT(url, data=ET.tostring(root))
|
||||||
|
|
||||||
if baselibs is False:
|
if baselibs is False:
|
||||||
specfile = source_file_load(self.apiurl, src_prj, src_pkg, '{}.spec'.format(sub_pkg), src_rev)
|
specfile = source_file_load(self.apiurl, src_prj, src_pkg, f'{sub_pkg}.spec', src_rev)
|
||||||
if specfile and 'baselibs.conf' in specfile:
|
if specfile and 'baselibs.conf' in specfile:
|
||||||
baselibs = True
|
baselibs = True
|
||||||
|
|
||||||
@ -1062,11 +1062,11 @@ class StagingAPI(object):
|
|||||||
|
|
||||||
def ensure_staging_archs(self, project):
|
def ensure_staging_archs(self, project):
|
||||||
meta = ET.parse(http_GET(self.project_meta_url(project)))
|
meta = ET.parse(http_GET(self.project_meta_url(project)))
|
||||||
repository = meta.find('repository[@name="{}"]'.format(self.cmain_repo))
|
repository = meta.find(f'repository[@name="{self.cmain_repo}"]')
|
||||||
|
|
||||||
changed = False
|
changed = False
|
||||||
for arch in self.cstaging_archs:
|
for arch in self.cstaging_archs:
|
||||||
if not repository.xpath('./arch[text()="{}"]'.format(arch)):
|
if not repository.xpath(f'./arch[text()="{arch}"]'):
|
||||||
elm = ET.SubElement(repository, 'arch')
|
elm = ET.SubElement(repository, 'arch')
|
||||||
elm.text = arch
|
elm.text = arch
|
||||||
changed = True
|
changed = True
|
||||||
@ -1083,18 +1083,18 @@ class StagingAPI(object):
|
|||||||
def prj_from_letter(self, letter):
|
def prj_from_letter(self, letter):
|
||||||
if ':' in letter: # not a letter
|
if ':' in letter: # not a letter
|
||||||
return letter
|
return letter
|
||||||
return '{}:{}'.format(self.cstaging, letter)
|
return f'{self.cstaging}:{letter}'
|
||||||
|
|
||||||
def adi_prj_from_number(self, number):
|
def adi_prj_from_number(self, number):
|
||||||
if ':' in str(number):
|
if ':' in str(number):
|
||||||
return number
|
return number
|
||||||
return '{}:adi:{}'.format(self.cstaging, number)
|
return f'{self.cstaging}:adi:{number}'
|
||||||
|
|
||||||
def list_requests_in_prj(self, project):
|
def list_requests_in_prj(self, project):
|
||||||
where = "@by_project='%s'+and+@state='new'" % project
|
where = f"@by_project='{project}'+and+@state='new'"
|
||||||
|
|
||||||
url = self.makeurl(['search', 'request', 'id'],
|
url = self.makeurl(['search', 'request', 'id'],
|
||||||
"match=state/@name='review'+and+review[%s]" % where)
|
f"match=state/@name='review'+and+review[{where}]")
|
||||||
f = http_GET(url)
|
f = http_GET(url)
|
||||||
root = ET.parse(f).getroot()
|
root = ET.parse(f).getroot()
|
||||||
list = []
|
list = []
|
||||||
@ -1111,7 +1111,7 @@ class StagingAPI(object):
|
|||||||
"""
|
"""
|
||||||
req = get_request(self.apiurl, str(request_id))
|
req = get_request(self.apiurl, str(request_id))
|
||||||
if not req:
|
if not req:
|
||||||
raise oscerr.WrongArgs('Request {} not found'.format(request_id))
|
raise oscerr.WrongArgs(f'Request {request_id} not found')
|
||||||
for i in req.reviews:
|
for i in req.reviews:
|
||||||
if by_project and i.by_project == by_project and i.state == 'new':
|
if by_project and i.by_project == by_project and i.state == 'new':
|
||||||
return
|
return
|
||||||
@ -1131,7 +1131,7 @@ class StagingAPI(object):
|
|||||||
if by_group:
|
if by_group:
|
||||||
query['by_group'] = by_group
|
query['by_group'] = by_group
|
||||||
if not msg:
|
if not msg:
|
||||||
msg = 'Being evaluated by group "{}"'.format(by_group)
|
msg = f'Being evaluated by group "{by_group}"'
|
||||||
if not query:
|
if not query:
|
||||||
raise oscerr.WrongArgs('We need a group or a project')
|
raise oscerr.WrongArgs('We need a group or a project')
|
||||||
query['cmd'] = 'addreview'
|
query['cmd'] = 'addreview'
|
||||||
@ -1215,7 +1215,7 @@ class StagingAPI(object):
|
|||||||
|
|
||||||
version = None
|
version = None
|
||||||
|
|
||||||
specfile = source_file_load(self.apiurl, project, package, '{}.spec'.format(package))
|
specfile = source_file_load(self.apiurl, project, package, f'{package}.spec')
|
||||||
if specfile:
|
if specfile:
|
||||||
try:
|
try:
|
||||||
version = re.findall('^Version:(.*)', specfile, re.MULTILINE)[0].strip()
|
version = re.findall('^Version:(.*)', specfile, re.MULTILINE)[0].strip()
|
||||||
@ -1227,7 +1227,7 @@ class StagingAPI(object):
|
|||||||
"""
|
"""
|
||||||
Return the version of a built rpm file
|
Return the version of a built rpm file
|
||||||
"""
|
"""
|
||||||
url = self.makeurl(['build', project, repository, arch, '_repository', "%s?view=fileinfo" % rpm])
|
url = self.makeurl(['build', project, repository, arch, '_repository', f"{rpm}?view=fileinfo"])
|
||||||
try:
|
try:
|
||||||
return ET.parse(http_GET(url)).getroot().find('version').text
|
return ET.parse(http_GET(url)).getroot().find('version').text
|
||||||
except HTTPError as e:
|
except HTTPError as e:
|
||||||
@ -1289,7 +1289,7 @@ class StagingAPI(object):
|
|||||||
return results
|
return results
|
||||||
|
|
||||||
def is_repo_dirty(self, project, repository):
|
def is_repo_dirty(self, project, repository):
|
||||||
url = self.makeurl(['build', project, '_result?code=broken&repository=%s' % repository])
|
url = self.makeurl(['build', project, f'_result?code=broken&repository={repository}'])
|
||||||
root = ET.parse(http_GET(url)).getroot()
|
root = ET.parse(http_GET(url)).getroot()
|
||||||
for repo in root.findall('result'):
|
for repo in root.findall('result'):
|
||||||
repostate = repo.get('state', 'missing')
|
repostate = repo.get('state', 'missing')
|
||||||
@ -1331,10 +1331,10 @@ class StagingAPI(object):
|
|||||||
u = self.makeurl(['build', prj], query=query)
|
u = self.makeurl(['build', prj], query=query)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
print("tried to trigger rebuild for project '%s' package '%s'" % (prj, pkg))
|
print(f"tried to trigger rebuild for project '{prj}' package '{pkg}'")
|
||||||
http_POST(u)
|
http_POST(u)
|
||||||
except HTTPError:
|
except HTTPError:
|
||||||
print("could not trigger rebuild for project '%s' package '%s'" % (prj, pkg))
|
print(f"could not trigger rebuild for project '{prj}' package '{pkg}'")
|
||||||
|
|
||||||
def _candidate_adi_project(self):
|
def _candidate_adi_project(self):
|
||||||
"""Decide a candidate name for an ADI project."""
|
"""Decide a candidate name for an ADI project."""
|
||||||
@ -1349,7 +1349,7 @@ class StagingAPI(object):
|
|||||||
|
|
||||||
def update_adi_frozenlinks(self, name, src_prj):
|
def update_adi_frozenlinks(self, name, src_prj):
|
||||||
xpath = {
|
xpath = {
|
||||||
'package': "@project='%s' and devel/@project='%s'" % (self.project, src_prj),
|
'package': f"@project='{self.project}' and devel/@project='{src_prj}'",
|
||||||
}
|
}
|
||||||
collection = search(self.apiurl, **xpath)['package']
|
collection = search(self.apiurl, **xpath)['package']
|
||||||
|
|
||||||
@ -1383,10 +1383,10 @@ class StagingAPI(object):
|
|||||||
|
|
||||||
adi_projects = self.get_adi_projects()
|
adi_projects = self.get_adi_projects()
|
||||||
if name in adi_projects:
|
if name in adi_projects:
|
||||||
raise Exception('Project {} already exist'.format(name))
|
raise Exception(f'Project {name} already exist')
|
||||||
|
|
||||||
if use_frozenlinks:
|
if use_frozenlinks:
|
||||||
linkproject = '<link project="{}"/>'.format(self.project)
|
linkproject = f'<link project="{self.project}"/>'
|
||||||
repository = '<repository name="standard" rebuild="direct" linkedbuild="all">'
|
repository = '<repository name="standard" rebuild="direct" linkedbuild="all">'
|
||||||
else:
|
else:
|
||||||
linkproject = ''
|
linkproject = ''
|
||||||
@ -1471,7 +1471,7 @@ class StagingAPI(object):
|
|||||||
name.text = check
|
name.text = check
|
||||||
|
|
||||||
meta = ET.parse(http_GET(self.project_meta_url(project)))
|
meta = ET.parse(http_GET(self.project_meta_url(project)))
|
||||||
repository = meta.find('repository[@name="{}"]'.format(self.cmain_repo))
|
repository = meta.find(f'repository[@name="{self.cmain_repo}"]')
|
||||||
|
|
||||||
for arch_element in repository.findall('arch'):
|
for arch_element in repository.findall('arch'):
|
||||||
architecture = arch_element.text
|
architecture = arch_element.text
|
||||||
@ -1480,7 +1480,7 @@ class StagingAPI(object):
|
|||||||
http_POST(url, data=ET.tostring(root))
|
http_POST(url, data=ET.tostring(root))
|
||||||
|
|
||||||
def register_new_staging_project(self, name):
|
def register_new_staging_project(self, name):
|
||||||
data = '<workflow><staging_project>{}</staging_project></workflow>'.format(name)
|
data = f'<workflow><staging_project>{name}</staging_project></workflow>'
|
||||||
url = self.makeurl(['staging', self.project, 'staging_projects'])
|
url = self.makeurl(['staging', self.project, 'staging_projects'])
|
||||||
try:
|
try:
|
||||||
http_POST(url, data=data)
|
http_POST(url, data=data)
|
||||||
@ -1492,7 +1492,7 @@ class StagingAPI(object):
|
|||||||
def is_user_member_of(self, user, group):
|
def is_user_member_of(self, user, group):
|
||||||
root = ET.fromstring(get_group(self.apiurl, group))
|
root = ET.fromstring(get_group(self.apiurl, group))
|
||||||
|
|
||||||
if root.findall("./person/person[@userid='%s']" % user):
|
if root.findall(f"./person/person[@userid='{user}']"):
|
||||||
return True
|
return True
|
||||||
else:
|
else:
|
||||||
return False
|
return False
|
||||||
@@ -1520,7 +1520,7 @@ class StagingAPI(object):
if self.rings:
# Determine if staging is bootstrapped.
meta = self.get_prj_meta(project)
-xpath = 'link[@project="{}"]'.format(self.rings[0])
+xpath = f'link[@project="{self.rings[0]}"]'
return meta.find(xpath) is not None

return False

@@ -25,7 +25,7 @@ class UnignoreCommand(object):
else:
for request_id in RequestFinder.find_sr(requests, self.api):
if request_id in requests_ignored.keys():
-print('{}: unignored'.format(request_id))
+print(f'{request_id}: unignored')
del requests_ignored[request_id]
self.api.del_ignored_request(request_id)
self.comment.add_comment(request_id=str(request_id), comment=self.MESSAGE)

@@ -45,7 +45,7 @@ class UnselectCommand(object):
if cleanup:
obsolete = self.api.project_status_requests('obsolete', self.filter_obsolete)
if len(obsolete) > 0:
-print('Cleanup {} obsolete requests'.format(len(obsolete)))
+print(f'Cleanup {len(obsolete)} obsolete requests')
packages += tuple(obsolete)

affected_projects = set()

@@ -53,7 +53,7 @@ class UnselectCommand(object):
self.api).items():
staging_project = request_project['staging']
affected_projects.add(staging_project)
-print('Unselecting "{}" from "{}"'.format(request, staging_project))
+print(f'Unselecting "{request}" from "{staging_project}"')
self.api.rm_from_prj(staging_project, request_id=request)

req = get_request(self.api.apiurl, str(request))

@@ -170,12 +170,12 @@ def mail_send(apiurl, project, to, subject, body, from_key='maintainer',
if from_key is None:
sender = entity_email(apiurl, conf.get_apiurl_usr(apiurl), include_name=True)
else:
-sender = config['mail-{}'.format(from_key)]
+sender = config[f'mail-{from_key}']

if '@' not in to:
-to = config['mail-{}'.format(to)]
+to = config[f'mail-{to}']

-followup_to = config.get('mail-{}'.format(followup_to_key))
+followup_to = config.get(f'mail-{followup_to_key}')
relay = config.get('mail-relay', 'relay.suse.de')

mail_send_with_details(text=body, subject=subject, relay=relay, sender=sender,
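Computed lookup keys like config[f'mail-{from_key}'] behave exactly like the .format() spelling; as a side note, f-strings also tend to be slightly faster because they compile to dedicated bytecode rather than a method call. A rough micro-benchmark sketch (timings vary by interpreter version and machine):

import timeit
key = 'maintainer'
print(timeit.timeit(lambda: 'mail-{}'.format(key)))
print(timeit.timeit(lambda: f'mail-{key}'))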
@@ -46,7 +46,7 @@ class CommandLineInterface(ToolBase.CommandLineInterface):
@cmdln.option('-d', '--dry', help='no modifications uploaded')
@cmdln.option('-p', '--project', help='target project')
@cmdln.option('-g', '--git-url', help='git repository for target project')
-@cmdln.option('-s', '--scope', help='scope on which to operate ({}, staging:$letter)'.format(', '.join(SCOPES)))
+@cmdln.option('-s', '--scope', help=f"scope on which to operate ({', '.join(SCOPES)}, staging:$letter)")
@cmdln.option('--no-checkout', action='store_true', help='reuse checkout in cache')
@cmdln.option('--stop-after-solve', action='store_true', help='only create group files')
@cmdln.option('--staging', help='Only solve that one staging')

@@ -115,4 +115,4 @@ class CommandLineInterface(ToolBase.CommandLineInterface):
elif scope == 'ring1':
return solve_project(api.rings[1], scope)
else:
-raise ValueError('scope "{}" must be one of: {}'.format(scope, ', '.join(self.SCOPES)))
+raise ValueError(f"scope \"{scope}\" must be one of: {', '.join(self.SCOPES)}")
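The delimiter switch from single to double quotes in the ValueError line is deliberate: before Python 3.12 (PEP 701) the expression part of an f-string could not reuse the quote character that delimits the string, so the embedded ', '.join(...) forces double-quote delimiters, with the literal quotes around the scope escaped instead. Quotes in the literal part were never restricted, which is why lines like print(f'Unselecting "{request}" from "{staging_project}"') earlier in the diff needed no such switch. A minimal sketch:

scope = 'ring2'
SCOPES = ('target', 'rings')
print(f"scope \"{scope}\" must be one of: {', '.join(SCOPES)}")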
@@ -44,7 +44,7 @@ def copy_directory_contents(source, destination, ignore_list=[]):


def change_extension(path, original, final):
-for name in glob.glob(os.path.join(path, '*{}'.format(original))):
+for name in glob.glob(os.path.join(path, f'*{original}')):
# Assumes the extension is only found at the end.
os.rename(name, name.replace(original, final))
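Both the old and the new spelling splice original into the glob pattern unescaped; if an extension value ever contained glob metacharacters (*, ?, [), they would be expanded. That behavior is unchanged by this commit, but glob.escape() exists for such cases. A hedged sketch, not part of the patch:

import glob
import os

original = '.kiwi'  # hypothetical extension value
pattern = os.path.join('/tmp', '*' + glob.escape(original))
print(glob.glob(pattern))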
@@ -89,7 +89,7 @@ class Group(object):

def _verify_solved(self):
if not self.solved:
-raise Exception('group {} not solved'.format(self.name))
+raise Exception(f'group {self.name} not solved')

def inherit(self, group):
for arch in self.architectures:

@@ -144,7 +144,7 @@ class Group(object):
jobs = list(self.pkglist.lockjobs[arch])
sel = pool.select(str(n), solv.Selection.SELECTION_NAME)
if sel.isempty():
-self.logger.debug('{}.{}: package {} not found'.format(self.name, arch, n))
+self.logger.debug(f'{self.name}.{arch}: package {n} not found')
self.not_found.setdefault(n, set()).add(arch)
return
else:

@@ -168,14 +168,14 @@ class Group(object):
for s in self.silents:
sel = pool.select(str(s), solv.Selection.SELECTION_NAME | solv.Selection.SELECTION_FLAT)
if sel.isempty():
-self.logger.warning('{}.{}: silent package {} not found'.format(self.name, arch, s))
+self.logger.warning(f'{self.name}.{arch}: silent package {s} not found')
else:
jobs += sel.jobs(solv.Job.SOLVER_INSTALL)

problems = solver.solve(jobs)
if problems:
for problem in problems:
-msg = 'unresolvable: {}:{}.{}: {}'.format(self.name, n, arch, problem)
+msg = f'unresolvable: {self.name}:{n}.{arch}: {problem}'
self.logger.debug(msg)
self.unresolvable[arch][n] = str(problem)
return
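One trade-off with converted logger calls: an f-string argument is rendered eagerly, before logging decides whether the record will be emitted, while %-style arguments are rendered lazily by the logging module itself. For hot debug paths the lazy form can be cheaper. A sketch with placeholder values:

import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
group, arch, pkg = 'base', 'x86_64', 'foo'  # placeholders for self.name, arch, n
logger.debug(f'{group}.{arch}: package {pkg} not found')       # string built even though DEBUG is off
logger.debug('%s.%s: package %s not found', group, arch, pkg)  # built only if the record is emitted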
@@ -361,7 +361,7 @@ class Group(object):
root.append(c)

if arch != '*':
-ET.SubElement(root, 'conditional', {'name': 'only_{}'.format(arch)})
+ET.SubElement(root, 'conditional', {'name': f'only_{arch}'})
packagelist = ET.SubElement(root, 'packagelist', {'relationship': 'recommends'})

missing = dict()

@@ -372,14 +372,14 @@ class Group(object):
if name in self.silents:
continue
if name in missing:
-msg = ' {} not found on {}'.format(name, ','.join(sorted(missing[name])))
+msg = f" {name} not found on {','.join(sorted(missing[name]))}"
if ignore_broken and name not in self.required:
c = ET.Comment(msg)
packagelist.append(c)
continue
name = msg
if name in unresolvable:
-msg = ' {} uninstallable: {}'.format(name, unresolvable[name])
+msg = f' {name} uninstallable: {unresolvable[name]}'
if ignore_broken and name not in self.required:
c = ET.Comment(msg)
packagelist.append(c)

@@ -393,7 +393,7 @@ class Group(object):
attrs['supportstatus'] = status
ET.SubElement(packagelist, 'package', attrs)
if name in packages and packages[name]:
-c = ET.Comment(' reason: {} '.format(packages[name]))
+c = ET.Comment(f' reason: {packages[name]} ')
packagelist.append(c)

return root
@@ -129,7 +129,7 @@ class PkgListGen(ToolBase.ToolBase):
for name in self.groups:
group = self.groups[name]
group.solved_packages = dict()
-fn = '{}.group'.format(group.name)
+fn = f'{group.name}.group'
with open(os.path.join(self.output_dir, fn), 'w') as fh:
for arch in archs:
x = group.toxml(arch, group.ignore_broken, None)

@@ -189,7 +189,7 @@ class PkgListGen(ToolBase.ToolBase):
if not group.solved:
continue
summary[name] = group.summary()
-fn = '{}.group'.format(group.name)
+fn = f'{group.name}.group'
with open(os.path.join(self.output_dir, fn), 'w') as fh:
comment = group.comment
for arch in archs:

@@ -247,7 +247,7 @@ class PkgListGen(ToolBase.ToolBase):
tocheck.add(s.name)

for locale in self.locales:
-id = pool.str2id('locale({})'.format(locale))
+id = pool.str2id(f'locale({locale})')
for s in pool.whatprovides(id):
tocheck_locales.add(s.name)

@@ -283,7 +283,7 @@ class PkgListGen(ToolBase.ToolBase):
continue
s = f'repo-{project}-{reponame}-{arch}-{state}.solv'
if not repo.add_solv(s):
-raise MismatchedRepoException('failed to add repo {}/{}/{}'.format(project, reponame, arch))
+raise MismatchedRepoException(f'failed to add repo {project}/{reponame}/{arch}')
for solvable in repo.solvables_iter():
if ignore_conflicts:
solvable.unset(solv.SOLVABLE_CONFLICTS)

@@ -432,11 +432,11 @@ class PkgListGen(ToolBase.ToolBase):
# Repo might not have this architecture
continue

-repo_solv_name = 'repo-{}-{}-{}.solv'.format(project, repo, arch)
+repo_solv_name = f'repo-{project}-{repo}-{arch}.solv'
# Would be preferable to include hash in name, but cumbersome to handle without
# reworking a fair bit since the state needs to be tracked.
solv_file = os.path.join(CACHEDIR, repo_solv_name)
-solv_file_hash = '{}::{}'.format(solv_file, state)
+solv_file_hash = f'{solv_file}::{state}'
if os.path.exists(solv_file) and os.path.exists(solv_file_hash):
# Solve file exists and hash unchanged, skip updating solv.
self.logger.debug('skipping solv generation for {} due to matching state {}'.format(

@@ -464,7 +464,7 @@ class PkgListGen(ToolBase.ToolBase):
pool.setarch()

# we need some progress in the debug output - or gocd gets nervous
-self.logger.debug('checking {}'.format(oldrepo))
+self.logger.debug(f'checking {oldrepo}')
oldsysrepo = file_utils.add_susetags(pool, oldrepo)

for arch in self.all_architectures:

@@ -476,7 +476,7 @@ class PkgListGen(ToolBase.ToolBase):
fn = f'repo-{project}-{repo}-{arch}-{state}.solv'
r = pool.add_repo('/'.join([project, repo]))
if not r.add_solv(fn):
-raise MismatchedRepoException('failed to add repo {}/{}/{}.'.format(project, repo, arch))
+raise MismatchedRepoException(f'failed to add repo {project}/{repo}/{arch}.')

pool.createwhatprovides()
@@ -534,7 +534,7 @@ class PkgListGen(ToolBase.ToolBase):
if not repo_output:
print('#', repo, file=output)
repo_output = True
-print('Provides: weakremover({})'.format(name), file=output)
+print(f'Provides: weakremover({name})', file=output)
else:
jarch = ' '.join(sorted(drops[name]['archs']))
exclusives.setdefault(jarch, []).append(name)

@@ -543,9 +543,9 @@ class PkgListGen(ToolBase.ToolBase):
if not repo_output:
print('#', repo, file=output)
repo_output = True
-print('%ifarch {}'.format(arch), file=output)
+print(f'%ifarch {arch}', file=output)
for name in sorted(exclusives[arch]):
-print('Provides: weakremover({})'.format(name), file=output)
+print(f'Provides: weakremover({name})', file=output)
print('%endif', file=output)
output.flush()

@@ -628,7 +628,7 @@ class PkgListGen(ToolBase.ToolBase):

def build_stub(self, destination, extension):
with open(os.path.join(destination, '.'.join(['stub', extension])), 'w+') as f:
-f.write('# prevent building single {} files twice\n'.format(extension))
+f.write(f'# prevent building single {extension} files twice\n')
f.write('Name: stub\n')
f.write('Version: 0.0\n')

@@ -645,7 +645,7 @@ class PkgListGen(ToolBase.ToolBase):
package.commit(msg='Automatic update', skip_local_service_run=True)

def replace_product_version(self, product_file, product_version):
-product_version = '<version>{}</version>'.format(product_version)
+product_version = f'<version>{product_version}</version>'
lines = open(product_file).readlines()
new_lines = []
for line in lines:

@@ -670,7 +670,7 @@ class PkgListGen(ToolBase.ToolBase):
self.all_architectures = target_config.get('pkglistgen-archs').split(' ')
self.use_newest_version = str2bool(target_config.get('pkglistgen-use-newest-version', 'False'))
self.repos = self.expand_repos(project, main_repo)
-logging.debug('[{}] {}/{}: update and solve'.format(scope, project, main_repo))
+logging.debug(f'[{scope}] {project}/{main_repo}: update and solve')

group = target_config.get('pkglistgen-group', '000package-groups')
product = target_config.get('pkglistgen-product', '000product')

@@ -691,7 +691,7 @@ class PkgListGen(ToolBase.ToolBase):
root = ET.fromstringlist(show_results_meta(api.apiurl, project, product,
repository=[main_repo], multibuild=True))
if len(root.xpath('result[@state="building"]')) or len(root.xpath('result[@state="dirty"]')):
-logging.info('{}/{} build in progress'.format(project, product))
+logging.info(f'{project}/{product} build in progress')
return
if git_url:
if os.path.exists(cache_dir + "/.git"):
@@ -711,21 +711,21 @@ class PkgListGen(ToolBase.ToolBase):
else:
url = api.makeurl(['source', project])
packages = ET.parse(http_GET(url)).getroot()
-if packages.find('entry[@name="{}"]'.format(product)) is None:
+if packages.find(f'entry[@name="{product}"]') is None:
if not self.dry_run:
undelete_package(api.apiurl, project, product, 'revive')
# TODO disable build.
-logging.info('{} undeleted, skip dvd until next cycle'.format(product))
+logging.info(f'{product} undeleted, skip dvd until next cycle')
return

drop_list = api.item_exists(project, oldrepos)
if drop_list and not only_release_packages:
checkout_list.append(oldrepos)

-if packages.find('entry[@name="{}"]'.format(release)) is None:
+if packages.find(f'entry[@name="{release}"]') is None:
if not self.dry_run:
undelete_package(api.apiurl, project, release, 'revive')
-logging.info('{} undeleted, skip dvd until next cycle'.format(release))
+logging.info(f'{release} undeleted, skip dvd until next cycle')
return

if not no_checkout:

@@ -743,7 +743,7 @@ class PkgListGen(ToolBase.ToolBase):
self.output_dir = product_dir

if not no_checkout and not git_url:
-logging.debug('Skipping checkout of {}'.format(project))
+logging.debug(f'Skipping checkout of {project}')
for package in checkout_list:
checkout_package(api.apiurl, project, package, expand_link=True,
prj_dir=cache_dir, outdir=os.path.join(cache_dir, package))

@@ -833,7 +833,7 @@ class PkgListGen(ToolBase.ToolBase):
logging.debug(subprocess.check_output(
[PRODUCT_SERVICE, product_file, product_dir, project], encoding='utf-8'))

-for delete_kiwi in target_config.get('pkglistgen-delete-kiwis-{}'.format(scope), '').split(' '):
+for delete_kiwi in target_config.get(f'pkglistgen-delete-kiwis-{scope}', '').split(' '):
delete_kiwis = glob.glob(os.path.join(product_dir, delete_kiwi))
file_utils.unlink_list(product_dir, delete_kiwis)
if scope == 'staging':
@@ -95,7 +95,7 @@ def parse_repomd(repo, baseurl):
sha = hashlib.sha256(primary.content).hexdigest()

if sha != sha_expected:
-raise Exception('checksums do not match {} != {}'.format(sha, sha_expected))
+raise Exception(f'checksums do not match {sha} != {sha_expected}')

os.lseek(f.fileno(), 0, os.SEEK_SET)
f.write(primary.content)

@@ -165,13 +165,13 @@ def print_repo_delta(pool, repo2, packages_file):
present = dict()
for s in pool.solvables_iter():
if s.repo != repo2:
-key = '{}/{}'.format(s.name, s.arch)
+key = f'{s.name}/{s.arch}'
present.setdefault(key, {})
present[key][s.evr] = s.repo
for s in repo2.solvables:
if s.arch == 'src':
continue
-key = '{}/{}'.format(s.name, s.arch)
+key = f'{s.name}/{s.arch}'
if present.get(key, {}).get(s.evr):
continue
elif key not in present:

@@ -277,7 +277,7 @@ def update_project(apiurl, project, fixate=None):

if opts.get('refresh', False):
opts['build'] = dump_solv_build(opts['url'])
-path = '{}_{}.packages'.format(key, opts['build'])
+path = f"{key}_{opts['build']}.packages"
else:
path = key + '.packages'
packages_file = os.path.join(repo_dir, path)
@@ -44,13 +44,13 @@ class RepoChecker():
if not repository:
repository = self.project_repository(project)
if not repository:
-self.logger.error('a repository must be specified via OSRT:Config main-repo for {}'.format(project))
+self.logger.error(f'a repository must be specified via OSRT:Config main-repo for {project}')
return
self.repository = repository

archs = target_archs(self.apiurl, project, repository)
if not len(archs):
-self.logger.debug('{} has no relevant architectures'.format(project))
+self.logger.debug(f'{project} has no relevant architectures')
return None

for arch in archs:

@@ -79,7 +79,7 @@ class RepoChecker():
continue
if comment.get('package') in comments:
continue
-self.logger.info("Removing comment for package {}".format(comment.get('package')))
+self.logger.info(f"Removing comment for package {comment.get('package')}")
url = makeurl(self.apiurl, ['comment', comment.get('id')])
http_DELETE(url)

@@ -101,7 +101,7 @@ class RepoChecker():

if oldcomment:
commentapi.delete(oldcomment['id'])
-self.logger.debug("Adding comment to {}/{}".format(self.project, package))
+self.logger.debug(f"Adding comment to {self.project}/{package}")
commentapi.add_comment(project_name=self.project, package_name=package, comment=newcomment)

def _split_and_filter(self, output):

@@ -123,7 +123,7 @@ class RepoChecker():
def project_repository(self, project):
repository = Config.get(self.apiurl, project).get('main-repo')
if not repository:
-self.logger.debug('no main-repo defined for {}'.format(project))
+self.logger.debug(f'no main-repo defined for {project}')

search_project = 'openSUSE:Factory'
for search_repository in ('snapshot', 'standard'):

@@ -142,7 +142,7 @@ class RepoChecker():
return

state_yaml = yaml.dump(state, default_flow_style=False)
-comment = 'Updated rebuild infos for {}/{}/{}'.format(self.project, self.repository, self.arch)
+comment = f'Updated rebuild infos for {self.project}/{self.repository}/{self.arch}'
source_file_ensure(self.apiurl, self.store_project, self.store_package,
self.store_filename, state_yaml, comment=comment)

@@ -157,7 +157,7 @@ class RepoChecker():
for rpm, rcode in buildresult.items():
if rcode != code:
continue
-source = "{}/{}/{}/{}".format(self.project, self.repository, self.arch, rpm)
+source = f"{self.project}/{self.repository}/{self.arch}/{rpm}"
if source not in oldstate[code]:
oldstate[code][source] = str(datetime.now())

@@ -165,7 +165,7 @@ class RepoChecker():
config = Config.get(self.apiurl, project)

oldstate = None
-self.store_filename = 'rebuildpacs.{}-{}.yaml'.format(project, repository)
+self.store_filename = f'rebuildpacs.{project}-{repository}.yaml'
if self.store_project and self.store_package:
state_yaml = source_file_load(self.apiurl, self.store_project, self.store_package,
self.store_filename)

@@ -244,7 +244,7 @@ class RepoChecker():
config.get(f'installcheck-ignore-conflicts-{arch}', '').split()

for package, entry in parsed.items():
-source = "{}/{}/{}/{}".format(project, repository, arch, entry['source'])
+source = f"{project}/{repository}/{arch}/{entry['source']}"
per_source.setdefault(source, {'output': [], 'buildresult': buildresult.get(entry['source'], 'gone'), 'ignored': True})
per_source[source]['output'].extend(entry['output'])
if package not in ignore_conflicts:

@@ -255,7 +255,7 @@ class RepoChecker():
for source in sorted(per_source):
if not len(per_source[source]['output']):
continue
-self.logger.debug("{} builds: {}".format(source, per_source[source]['buildresult']))
+self.logger.debug(f"{source} builds: {per_source[source]['buildresult']}")
self.logger.debug(" " + "\n ".join(per_source[source]['output']))
if per_source[source]['buildresult'] != 'succeeded': # nothing we can do
continue

@@ -276,7 +276,7 @@ class RepoChecker():
'rebuild': str(datetime.now())}

for source in list(oldstate['check']):
-if not source.startswith('{}/{}/{}/'.format(project, repository, arch)):
+if not source.startswith(f'{project}/{repository}/{arch}/'):
continue
code = buildresult.get(os.path.basename(source), 'gone')
if code == 'gone' or code == 'excluded':

@@ -319,7 +319,7 @@ class RepoChecker():
m = hashlib.sha256()
for bdep in sorted(infos[package]['deps']):
m.update(bytes(bdep + '-' + infos[package]['deps'][bdep], 'utf-8'))
-state_key = '{}/{}/{}/{}'.format(project, repository, arch, package)
+state_key = f'{project}/{repository}/{arch}/{package}'
olddigest = oldstate['leafs'].get(state_key, {}).get('buildinfo')
if olddigest == m.hexdigest():
continue
@@ -19,7 +19,7 @@ class Requestfinder(ToolBase.ToolBase):

def fill_package_meta(self, project):
self.package_metas = dict()
-url = osc.core.makeurl(self.apiurl, ['search', 'package'], "match=[@project='%s']" % project)
+url = osc.core.makeurl(self.apiurl, ['search', 'package'], f"match=[@project='{project}']")
root = ET.fromstring(self.cached_GET(url))
for p in root.findall('package'):
name = p.attrib['name']

@@ -70,7 +70,7 @@ class CommandLineInterface(ToolBase.CommandLineInterface):
return tool

def _load_settings(self, settings, name):
-section = 'settings {}'.format(name)
+section = f'settings {name}'
for option in settings.keys():
if self.cp.has_option(section, option):
settings[option] = self.cp.get(section, option).replace('\n', ' ')

@@ -209,7 +209,7 @@ class CommandLineInterface(ToolBase.CommandLineInterface):
rqs = self.tool.find_requests(settings)
for r in rqs:
self.print_actions(r)
-print("osc rq {} -m '{}' {}".format(settings['action'], settings['message'], r.reqid))
+print(f"osc rq {settings['action']} -m '{settings['message']}' {r.reqid}")

def help_examples(self):
return """$ cat > ~/.config/opensuse-release-tools/requestfinder.conf << EOF
@@ -299,7 +299,7 @@ class SkippkgFinder(object):
if index in package_binaries:
selected_binarylist += package_binaries[index]
else:
-logging.info("Can not find binary of %s" % index)
+logging.info(f"Can not find binary of {index}")

# Some packages has been obsoleted by new updated package, however
# there are application still depend on old library when it builds
@@ -85,7 +85,7 @@ class InstallChecker(object):
if provided_by.get('name') in built_binaries:
provided_found = True
else:
-comments.append(' also provided by {} -> ignoring'.format(provided_by.get('name')))
+comments.append(f" also provided by {provided_by.get('name')} -> ignoring")
alternative_found = True

if not alternative_found:

@@ -104,7 +104,7 @@ class InstallChecker(object):
if result:
return True
else:
-comments.append('Error: missing alternative provides for {}'.format(provide))
+comments.append(f'Error: missing alternative provides for {provide}')
return False

@memoize(session=True)

@@ -120,7 +120,7 @@ class InstallChecker(object):
def check_delete_request(self, req, to_ignore, to_delete, comments):
package = req.get('package')
if package in to_ignore or self.ignore_deletes:
-self.logger.info('Delete request for package {} ignored'.format(package))
+self.logger.info(f'Delete request for package {package} ignored')
return True

pkg_flavors = self.pkg_with_multibuild_flavors(package)

@@ -190,10 +190,10 @@ class InstallChecker(object):

all_done = True
for arch in architectures:
-pra = '{}/{}/{}'.format(project, repository, arch)
+pra = f'{project}/{repository}/{arch}'
buildid = self.buildid(project, repository, arch)
if not buildid:
-self.logger.error('No build ID in {}'.format(pra))
+self.logger.error(f'No build ID in {pra}')
return False
buildids[arch] = buildid
url = self.report_url(project, repository, arch, buildid)

@@ -201,11 +201,11 @@ class InstallChecker(object):
root = ET.parse(osc.core.http_GET(url)).getroot()
check = root.find('check[@name="installcheck"]/state')
if check is not None and check.text != 'pending':
-self.logger.info('{} already "{}", ignoring'.format(pra, check.text))
+self.logger.info(f'{pra} already "{check.text}", ignoring')
else:
all_done = False
except HTTPError:
-self.logger.info('{} has no status report'.format(pra))
+self.logger.info(f'{pra} has no status report')
all_done = False

if all_done and not force:

@@ -218,7 +218,7 @@ class InstallChecker(object):
to_ignore = self.packages_to_ignore(project)
status = api.project_status(project)
if status is None:
-self.logger.error('no project status for {}'.format(project))
+self.logger.error(f'no project status for {project}')
return False

# collect packages to be deleted

@@ -282,9 +282,9 @@ class InstallChecker(object):
if result:
self.report_state('success', self.gocd_url(), project, repository, buildids)
else:
-result_comment.insert(0, 'Generated from {}\n'.format(self.gocd_url()))
+result_comment.insert(0, f'Generated from {self.gocd_url()}\n')
self.report_state('failure', self.upload_failure(project, result_comment), project, repository, buildids)
-self.logger.warning('Not accepting {}'.format(project))
+self.logger.warning(f'Not accepting {project}')
return False

return result

@@ -295,7 +295,7 @@ class InstallChecker(object):
osc.core.http_PUT(url, data='\n'.join(comment))

url = self.api.apiurl.replace('api.', 'build.')
-return '{}/package/view_file/home:repo-checker/reports/{}'.format(url, project)
+return f'{url}/package/view_file/home:repo-checker/reports/{project}'

def report_state(self, state, report_url, project, repository, buildids):
architectures = self.target_archs(project, repository)

@@ -357,10 +357,10 @@ class InstallChecker(object):
return sorted(archs, reverse=True)

def install_check(self, directories, arch, whitelist, ignored_conflicts):
-self.logger.info('install check: start (whitelist:{})'.format(','.join(whitelist)))
+self.logger.info(f"install check: start (whitelist:{','.join(whitelist)})")
parts = installcheck(directories, arch, whitelist, ignored_conflicts)
if len(parts):
-header = '### [install check & file conflicts for {}]'.format(arch)
+header = f'### [install check & file conflicts for {arch}]'
return CheckResult(False, header + '\n\n' + ('\n' + ('-' * 80) + '\n\n').join(parts))

self.logger.info('install check: passed')

@@ -373,7 +373,7 @@ class InstallChecker(object):
self.allowed_cycles.append(comma_list.split(','))

def cycle_check(self, project, repository, arch):
-self.logger.info('cycle check: start %s/%s/%s' % (project, repository, arch))
+self.logger.info(f'cycle check: start {project}/{repository}/{arch}')
comment = []

depinfo = builddepinfo(self.api.apiurl, project, repository, arch, order=False)

@@ -387,7 +387,7 @@ class InstallChecker(object):
break
if not allowed:
cycled = [p.text for p in cycle.findall('package')]
-comment.append('Package {} appears in cycle {}'.format(package, '/'.join(cycled)))
+comment.append(f"Package {package} appears in cycle {'/'.join(cycled)}")

if len(comment):
# New cycles, post comment.
@@ -25,7 +25,7 @@ class StagingReport(object):
package.get('package'),
package.get('repository'),
package.get('arch'))
-text = '[%s](%s)' % (package.get('arch'), link)
+text = f"[{package.get('arch')}]({link})"
return text

def old_enough(self, _date):

@@ -56,13 +56,13 @@ class StagingReport(object):
groups[package.get('package')].append(package)

failing_lines = [
-'* Build failed %s (%s)' % (key, ', '.join(self._package_url(p) for p in value))
+f"* Build failed {key} ({', '.join(self._package_url(p) for p in value)})"
for key, value in groups.items()
]

report = '\n'.join(failing_lines[:MAX_LINES])
if len(failing_lines) > MAX_LINES:
-report += '* and more (%s) ...' % (len(failing_lines) - MAX_LINES)
+report += f'* and more ({len(failing_lines) - MAX_LINES}) ...'
return report

def report_checks(self, info):
|
|||||||
for check in info.findall('checks/check'):
|
for check in info.findall('checks/check'):
|
||||||
state = check.find('state').text
|
state = check.find('state').text
|
||||||
links_state.setdefault(state, [])
|
links_state.setdefault(state, [])
|
||||||
links_state[state].append('[{}]({})'.format(check.get('name'), check.find('url').text))
|
links_state[state].append(f"[{check.get('name')}]({check.find('url').text})")
|
||||||
|
|
||||||
lines = []
|
lines = []
|
||||||
failure = False
|
failure = False
|
||||||
@ -78,14 +78,14 @@ class StagingReport(object):
|
|||||||
if len(links) > MAX_LINES:
|
if len(links) > MAX_LINES:
|
||||||
extra = len(links) - MAX_LINES
|
extra = len(links) - MAX_LINES
|
||||||
links = links[:MAX_LINES]
|
links = links[:MAX_LINES]
|
||||||
links.append('and {} more...'.format(extra))
|
links.append(f'and {extra} more...')
|
||||||
|
|
||||||
lines.append('- {}'.format(state))
|
lines.append(f'- {state}')
|
||||||
if state != 'success':
|
if state != 'success':
|
||||||
lines.extend([' - {}'.format(link) for link in links])
|
lines.extend([f' - {link}' for link in links])
|
||||||
failure = True
|
failure = True
|
||||||
else:
|
else:
|
||||||
lines[-1] += ': {}'.format(', '.join(links))
|
lines[-1] += f": {', '.join(links)}"
|
||||||
|
|
||||||
return '\n'.join(lines).strip(), failure
|
return '\n'.join(lines).strip(), failure
|
||||||
|
|
||||||
|
@@ -98,10 +98,10 @@ class StagingHelper(object):
print("There is no support_pkg_rebuild file!")
return

-logging.info('Gathering support package list from %s' % self.project)
+logging.info(f'Gathering support package list from {self.project}')
support_pkgs = self.get_support_package_list(self.project, 'standard')
files = self.process_project_binarylist(self.project, 'standard', 'x86_64')
-staging_projects = ["%s:%s" % (self.api.cstaging, p) for p in self.api.get_staging_projects_short()]
+staging_projects = [f"{self.api.cstaging}:{p}" for p in self.api.get_staging_projects_short()]
cand_sources = defaultdict(list)
for stg in staging_projects:
status = self.api.project_status(stg, status=False)

@@ -146,7 +146,7 @@ class StagingHelper(object):
need_rebuild = True

if need_rebuild and not self.api.is_repo_dirty(stgname, 'standard'):
-logging.info('Rebuild %s' % stgname)
+logging.info(f'Rebuild {stgname}')
osc.core.rebuild(self.apiurl, stgname, None, None, None)
stg.find('rebuild').text = 'unneeded'

@@ -174,7 +174,7 @@ if __name__ == '__main__':
parser.add_argument('-d', '--debug', action='store_true',
help='print info useful for debuging')
parser.add_argument('-p', '--project', dest='project', metavar='PROJECT',
-help='deafult project (default: %s)' % OPENSUSE,
+help=f'deafult project (default: {OPENSUSE})',
default=OPENSUSE)

args = parser.parse_args()
@@ -71,14 +71,14 @@ class TestCase(unittest.TestCase):
with open(OSCRC, 'w+') as f:
f.write('\n'.join([
'[general]',
-'apiurl = {}'.format(APIURL),
+f'apiurl = {APIURL}',
'http_debug = false',
'debug = false',
-'cookiejar = {}'.format(OSCCOOKIEJAR),
+f'cookiejar = {OSCCOOKIEJAR}',
-'[{}]'.format(APIURL),
+f'[{APIURL}]',
-'user = {}'.format(userid),
+f'user = {userid}',
'pass = opensuse',
-'email = {}@example.com'.format(userid),
+f'email = {userid}@example.com',
# allow plain http even if it is insecure; we're testing after all
'allow_http = 1',
# disable cert checking to allow self-signed certs

@@ -175,10 +175,10 @@ class TestCase(unittest.TestCase):
for review in request.reviews:
for key, value in kwargs.items():
if hasattr(review, key) and getattr(review, key) == value[0]:
-self.assertEqual(review.state, value[1], '{}={} not {}'.format(key, value[0], value[1]))
+self.assertEqual(review.state, value[1], f'{key}={value[0]} not {value[1]}')
return review

-self.fail('{} not found'.format(kwargs))
+self.fail(f'{kwargs} not found')

def assertReviewScript(self, request_id, user, before, after, comment=None):
"""Asserts the review script pointed by the ``script`` attribute of the current test can
@@ -356,16 +356,16 @@ class StagingWorkflow(ABC):
if name not in self.attr_types[namespace]:
self.attr_types[namespace].append(name)

-meta = """
+meta = f"""
-<namespace name='{}'>
+<namespace name='{namespace}'>
<modifiable_by user='Admin'/>
-</namespace>""".format(namespace)
+</namespace>"""
url = osc.core.makeurl(APIURL, ['attribute', namespace, '_meta'])
osc.core.http_PUT(url, data=meta)

-meta = "<definition name='{}' namespace='{}'><description/>".format(name, namespace)
+meta = f"<definition name='{name}' namespace='{namespace}'><description/>"
if values:
-meta += "<count>{}</count>".format(values)
+meta += f"<count>{values}</count>"
meta += "<modifiable_by role='maintainer'/></definition>"
url = osc.core.makeurl(APIURL, ['attribute', namespace, name, '_meta'])
osc.core.http_PUT(url, data=meta)
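Triple-quoted strings convert the same way, as the namespace template above shows: the f prefix replaces the trailing .format(namespace) call and the placeholder gains a name. One caveat of such mechanical conversions: a literal { or } in the template would need doubling to {{ or }} once the f prefix is added; these XML snippets contain none, so the rewrite is safe. A minimal sketch:

namespace = 'OSRT'
meta = f"""
<namespace name='{namespace}'>
  <modifiable_by user='Admin'/>
</namespace>"""
print(meta)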
@@ -418,11 +418,11 @@ class StagingWorkflow(ABC):
:param users: list of users to be in group
:type users: list(str)
"""
-meta = """
+meta = f"""
<group>
-<title>{}</title>
+<title>{name}</title>
</group>
-""".format(name)
+"""

if len(users):
root = ET.fromstring(meta)

@@ -450,13 +450,13 @@ class StagingWorkflow(ABC):
"""
if name in self.users:
return
-meta = """
+meta = f"""
<person>
-<login>{}</login>
+<login>{name}</login>
-<email>{}@example.com</email>
+<email>{name}@example.com</email>
<state>confirmed</state>
</person>
-""".format(name, name)
+"""
self.users.append(name)
url = osc.core.makeurl(APIURL, ['person', name])
osc.core.http_PUT(url, data=meta)

@@ -620,7 +620,7 @@ class StagingWorkflow(ABC):
:type namespace: str
"""
for name in self.attr_types[namespace]:
-print('deleting attribute type {}:{}'.format(namespace, name))
+print(f'deleting attribute type {namespace}:{name}')
url = osc.core.makeurl(APIURL, ['attribute', namespace, name, '_meta'])
self._safe_delete(url)
print('deleting namespace', namespace)

@@ -695,7 +695,7 @@ class FactoryWorkflow(StagingWorkflow):
self.create_link(target_wine, self.projects['ring1'])

def create_staging(self, suffix, freeze=False, rings=None, with_repo=False):
-staging_key = 'staging:{}'.format(suffix)
+staging_key = f'staging:{suffix}'
# do not reattach if already present
if staging_key not in self.projects:
staging_name = self.project + ':Staging:' + suffix

@@ -846,11 +846,11 @@ class Project(object):
:param with_repo: whether a repository should be created as part of the meta
:type with_repo: bool
"""
-meta = """
+meta = f"""
-<project name="{0}">
+<project name="{self.name}">
<title></title>
<description></description>
-</project>""".format(self.name)
+</project>"""

root = ET.fromstring(meta)
for group in reviewer.get('groups', []):
@@ -983,11 +983,11 @@ class Package(object):
self.name = name
self.project = project

-meta = """
+meta = f"""
-<package project="{1}" name="{0}">
+<package project="{self.project.name}" name="{self.name}">
<title></title>
<description></description>
-</package>""".format(self.name, self.project.name)
+</package>"""

if devel_project:
root = ET.fromstring(meta)
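The old package template is a good illustration of why the conversion aids readability: with .format(), the indices {1} and {0} have to be matched against the argument list by eye (and here they arrive in the opposite order), whereas the f-string names each value at its point of use. A reduced sketch of the same pitfall:

name, project_name = 'wine', 'openSUSE:Factory'
old = '<package project="{1}" name="{0}">'.format(name, project_name)
new = f'<package project="{project_name}" name="{name}">'
assert old == new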
@@ -996,7 +996,7 @@ class Package(object):

url = osc.core.make_meta_url('pkg', (self.project.name, self.name), APIURL)
osc.core.http_PUT(url, data=meta)
-print('created {}/{}'.format(self.project.name, self.name))
+print(f'created {self.project.name}/{self.name}')
self.project.add_package(self)

# delete from instance

@@ -64,7 +64,7 @@ class TestAccept(unittest.TestCase):
# check which id was added
new_id = (set(comments.keys()) - set(self.comments.keys())).pop()
comment = comments[new_id]['comment']
-ncomment = 'Project "{}" accepted. '.format(self.prj)
+ncomment = f'Project "{self.prj}" accepted. '
ncomment += "The following packages have been submitted to openSUSE:Factory: wine."
self.assertEqual(ncomment, comment)

@@ -117,7 +117,7 @@ class TestApiCalls(OBSLocal.TestCase):

# Verify that review is closed
rq = self.winerq.xml()
-xpath = "//review[@name='new' and @by_project='{}']".format(self.staging_b.name)
+xpath = f"//review[@name='new' and @by_project='{self.staging_b.name}']"
self.assertIsNotNone(rq.xpath(xpath))

def test_add_sr(self):
@ -79,7 +79,7 @@ class TestCheckSource(OBSLocal.TestCase):
|
|||||||
self.review_bot.check_requests()
|
self.review_bot.check_requests()
|
||||||
|
|
||||||
review = self.assertReview(req_id, by_user=(self.bot_user, 'declined'))
|
review = self.assertReview(req_id, by_user=(self.bot_user, 'declined'))
|
||||||
self.assertIn('%s is not a devel project of %s' % (SRC_PROJECT, PROJECT), review.comment)
|
self.assertIn(f'{SRC_PROJECT} is not a devel project of {PROJECT}', review.comment)
|
||||||
|
|
||||||
@pytest.mark.usefixtures("required_source_maintainer")
|
@pytest.mark.usefixtures("required_source_maintainer")
|
||||||
def test_devel_project(self):
|
def test_devel_project(self):
|
||||||
@ -192,11 +192,11 @@ class TestCheckSource(OBSLocal.TestCase):
|
|||||||
review = self.assertReview(req.reqid, by_user=(self.bot_user, 'declined'))
|
review = self.assertReview(req.reqid, by_user=(self.bot_user, 'declined'))
|
||||||
add_role_req = get_request_list(self.wf.apiurl, SRC_PROJECT, req_state=['new'], req_type='add_role')[0]
|
add_role_req = get_request_list(self.wf.apiurl, SRC_PROJECT, req_state=['new'], req_type='add_role')[0]
|
||||||
|
|
||||||
self.assertIn('unless %s is a maintainer of %s' % (FACTORY_MAINTAINERS, SRC_PROJECT), review.comment)
|
self.assertIn(f'unless {FACTORY_MAINTAINERS} is a maintainer of {SRC_PROJECT}', review.comment)
|
||||||
self.assertIn('Created the add_role request %s' % add_role_req.reqid, review.comment)
|
self.assertIn(f'Created the add_role request {add_role_req.reqid}', review.comment)
|
||||||
|
|
||||||
self.assertEqual(add_role_req.actions[0].tgt_project, SRC_PROJECT)
|
self.assertEqual(add_role_req.actions[0].tgt_project, SRC_PROJECT)
|
||||||
self.assertEqual('Created automatically from request %s' % req.reqid, add_role_req.description)
|
self.assertEqual(f'Created automatically from request {req.reqid}', add_role_req.description)
|
||||||
|
|
||||||
# reopen request and do it again to test that new add_role request won't be created
|
# reopen request and do it again to test that new add_role request won't be created
|
||||||
req.change_state('new')
|
req.change_state('new')
|
||||||
@ -247,11 +247,11 @@ class TestCheckSource(OBSLocal.TestCase):
|
|||||||
review = self.assertReview(req.reqid, by_user=(self.bot_user, 'declined'))
|
review = self.assertReview(req.reqid, by_user=(self.bot_user, 'declined'))
|
||||||
add_role_req = get_request_list(self.wf.apiurl, SRC_PROJECT, req_state=['new'], req_type='add_role')[0]
|
add_role_req = get_request_list(self.wf.apiurl, SRC_PROJECT, req_state=['new'], req_type='add_role')[0]
|
||||||
|
|
||||||
self.assertIn('unless %s is a maintainer of %s' % (FACTORY_MAINTAINERS, SRC_PROJECT), review.comment)
|
self.assertIn(f'unless {FACTORY_MAINTAINERS} is a maintainer of {SRC_PROJECT}', review.comment)
|
||||||
self.assertIn('Created the add_role request %s' % add_role_req.reqid, review.comment)
|
self.assertIn(f'Created the add_role request {add_role_req.reqid}', review.comment)
|
||||||
|
|
||||||
self.assertEqual(add_role_req.actions[0].tgt_project, SRC_PROJECT)
|
self.assertEqual(add_role_req.actions[0].tgt_project, SRC_PROJECT)
|
||||||
self.assertEqual('Created automatically from request %s' % req.reqid, add_role_req.description)
|
self.assertEqual(f'Created automatically from request {req.reqid}', add_role_req.description)
|
||||||
|
|
||||||
@pytest.mark.usefixtures("default_config")
|
@pytest.mark.usefixtures("default_config")
|
||||||
def test_bad_rpmlintrc(self):
|
def test_bad_rpmlintrc(self):
|
||||||
@@ -15,8 +15,8 @@ class TestComment(unittest.TestCase):
 self.api = CommentAPI('bogus')
 self.bot = type(self).__name__
 self.comments = {
-1: {'comment': '<!-- {} -->\n\nshort comment'.format(self.bot)},
-2: {'comment': '<!-- {} foo=bar distro=openSUSE -->\n\nshort comment'.format(self.bot)}
+1: {'comment': f'<!-- {self.bot} -->\n\nshort comment'},
+2: {'comment': f'<!-- {self.bot} foo=bar distro=openSUSE -->\n\nshort comment'}
 }

 def test_truncate(self):

@@ -52,7 +52,7 @@ handle
 truncated = self.api.truncate(comment, length=i)
 print('=' * 80)
 print(truncated)
-self.assertTrue(len(truncated) <= i, '{} <= {}'.format(len(truncated), i))
+self.assertTrue(len(truncated) <= i, f'{len(truncated)} <= {i}')
 self.assertEqual(truncated.count('<pre>'), truncated.count('</pre>'))
 self.assertFalse(len(re.findall(r'</?\w+[^\w>]', truncated)))
 tag_count = truncated.count('<pre>') + truncated.count('</pre>')
@@ -25,7 +25,7 @@ class MockedContainerCleaner(ContainerCleaner):

 return ret
 else:
-raise RuntimeError("Path %s not expected" % path)
+raise RuntimeError(f"Path {path} not expected")

 def getDirBinaries(self, path):
 """Mock certain OBS APIs returning a list of binaries"""

@@ -37,7 +37,7 @@ class MockedContainerCleaner(ContainerCleaner):

 return []
 else:
-raise RuntimeError("Path %s not expected" % path)
+raise RuntimeError(f"Path {path} not expected")


 class TestContainerCleaner(unittest.TestCase):
@@ -28,13 +28,13 @@ class TestFreeze(OBSLocal.TestCase):

 fp = self._get_fixture_path('staging-meta-for-bootstrap-copy.xml')
 fc.prj = 'openSUSE:Factory:Staging:A'
-fixture = subprocess.check_output('/usr/bin/xmllint --format %s' % fp, shell=True).decode('utf-8')
+fixture = subprocess.check_output(f'/usr/bin/xmllint --format {fp}', shell=True).decode('utf-8')

 f = tempfile.NamedTemporaryFile(delete=False)
 f.write(fc.prj_meta_for_bootstrap_copy())
 f.close()

-output = subprocess.check_output('/usr/bin/xmllint --format %s' % f.name, shell=True).decode('utf-8')
+output = subprocess.check_output(f'/usr/bin/xmllint --format {f.name}', shell=True).decode('utf-8')

 for line in difflib.unified_diff(fixture.split("\n"), output.split("\n")):
 print(line)
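A side note on the two xmllint calls above: the f-string conversion keeps shell=True, so the path is still interpolated into a shell command line. A minimal sketch of the same call with an argument list, which avoids the shell entirely (assuming only that xmllint lives at the path the test already uses):

    import subprocess

    # Hypothetical stand-in for the fixture path used in the test.
    fp = 'staging-meta-for-bootstrap-copy.xml'

    # Passing a list skips the shell, so a path containing spaces or shell
    # metacharacters cannot break, or inject into, the command line.
    fixture = subprocess.check_output(
        ['/usr/bin/xmllint', '--format', fp]).decode('utf-8')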
@@ -189,7 +189,7 @@ class TestOrigin(OBSLocal.TestCase):
 CommentAPI(self.wf.api.apiurl).add_comment(
 request_id=request.reqid, comment=f'@{self.bot_user} change_devel')

-comment = 'change_devel command by {}'.format('Admin')
+comment = 'change_devel command by Admin'
 else:
 comment = 'only devel origin allowed'
@@ -53,7 +53,7 @@ class ToTestManager(ToolBase.ToolBase):
 self.api = StagingAPI(self.apiurl, project=project)

 def version_file(self, target):
-return 'version_%s' % target
+return f'version_{target}'

 def write_version_to_dashboard(self, target, version):
 if self.dryrun or self.project.do_not_release:
@@ -70,7 +70,7 @@ class ToTestManager(ToolBase.ToolBase):
 r'.*-(?:Build|Snapshot)([0-9.]+)(?:-Media.*\.iso|\.docker\.tar\.xz|\.tar\.xz|\.raw\.xz|\.appx)', binary)
 if result:
 return result.group(1)
-raise NotFoundException("can't find %s iso version" % project)
+raise NotFoundException(f"can't find {project} iso version")

 def version_from_totest_project(self):
 if len(self.project.main_products):
@@ -103,7 +103,7 @@ class ToTestManager(ToolBase.ToolBase):
 result = re.match(r'.*-Build(.*)-Media1.report', binary)
 if result:
 return result.group(1)
-raise NotFoundException("can't find %s ftp version" % project)
+raise NotFoundException(f"can't find {project} ftp version")

 # make sure to update the attribute as atomic as possible - as such
 # only update the snapshot and don't erase anything else. The snapshots
@@ -113,7 +113,7 @@ class ToTestManager(ToolBase.ToolBase):
 status_dict = self.get_status_dict()
 if status_dict.get(status) == snapshot:
 return
-self.logger.info('setting {} snapshot to {} (previously {})'.format(status, snapshot, status_dict.get(status)))
+self.logger.info(f'setting {status} snapshot to {snapshot} (previously {status_dict.get(status)})')
 if self.dryrun:
 return
 if status_dict.get(status) != snapshot:
@@ -152,7 +152,7 @@ class ToTestManager(ToolBase.ToolBase):

 url = self.api.makeurl(baseurl, query=query)
 if self.dryrun or self.project.do_not_release:
-self.logger.info('release %s/%s (%s)' % (project, package, query))
+self.logger.info(f'release {project}/{package} ({query})')
 else:
 self.api.retried_POST(url)
@@ -50,7 +50,7 @@ class ToTestPublisher(ToTestManager):
 self.failed_ignored_jobs = []

 if len(jobs) < self.project.jobs_num: # not yet scheduled
-self.logger.warning('we have only %s jobs' % len(jobs))
+self.logger.warning(f'we have only {len(jobs)} jobs')
 return QAResult.inprogress

 in_progress = False
@@ -92,7 +92,7 @@ class ToTestPublisher(ToTestManager):
 # remove flag - unfortunately can't delete comment unless admin
 data = {'text': text}
 if self.dryrun:
-self.logger.info('Would label {} with: {}'.format(job['id'], text))
+self.logger.info(f"Would label {job['id']} with: {text}")
 else:
 self.openqa.openqa_request(
 'PUT', 'jobs/%s/comments/%d' % (job['id'], labeled), data=data)
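Worth noting in this hunk: the converted line switches the outer quotes from single to double so that job['id'] can be subscripted inside the f-string (before Python 3.12, an f-string could not reuse its own quote character inside the braces). The final 'PUT' line is left in %-style, plausibly for the same quoting reason plus its %d conversion; a hedged sketch of what an f-string version could look like, with assumed sample values:

    # Assumed sample values, for illustration only.
    job = {'id': 4242}
    labeled = 7

    # Outer double quotes let the expression contain job['id'] on Python < 3.12.
    path = f"jobs/{job['id']}/comments/{labeled}"
    assert path == 'jobs/%s/comments/%d' % (job['id'], labeled)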
@@ -103,12 +103,12 @@ class ToTestPublisher(ToTestManager):
 if not labeled and len(refs) > 0:
 data = {'text': 'label:unknown_failure'}
 if self.dryrun:
-self.logger.info('Would label {} as unknown'.format(job['id']))
+self.logger.info(f"Would label {job['id']} as unknown")
 else:
 self.openqa.openqa_request(
-'POST', 'jobs/%s/comments' % job['id'], data=data)
+'POST', f"jobs/{job['id']}/comments", data=data)

-joburl = '%s/tests/%s' % (self.project.openqa_server, job['id'])
+joburl = f"{self.project.openqa_server}/tests/{job['id']}"
 self.logger.info('job %s failed, see %s', job['name'], joburl)

 elif job['result'] == 'passed' or job['result'] == 'softfailed':
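The last logger call above stays untouched for a different reason: it already uses logging's deferred %-style interpolation, where the format string and arguments are passed separately and only rendered if the record is actually emitted. Converting such calls to f-strings would force the formatting to happen eagerly. A small sketch of the distinction, with assumed sample values:

    import logging

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger(__name__)

    # Assumed sample values, not taken from the diff.
    name, joburl = 'opensuse-Tumbleweed-DVD', 'https://openqa.example/tests/1'

    # Deferred: formatting only happens if INFO is enabled for this logger.
    logger.info('job %s failed, see %s', name, joburl)

    # Eager: the f-string is built before logging even checks the level.
    logger.info(f'job {name} failed, see {joburl}')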
@@ -137,7 +137,7 @@ class ToTestPublisher(ToTestManager):

 self.logger.debug('Sending AMQP message')
 inf = re.sub(r'ed$', '', str(current_result))
-msg_topic = '%s.ttm.build.%s' % (self.project.base.lower(), inf)
+msg_topic = f'{self.project.base.lower()}.ttm.build.{inf}'
 msg_body = json.dumps({
 'build': current_snapshot,
 'project': self.project.name,
@@ -158,9 +158,9 @@ class ToTestPublisher(ToTestManager):
 notify_connection.close()
 break
 except pika.exceptions.ConnectionClosed as e:
-self.logger.warning('Sending AMQP event did not work: %s. Retrying try %s out of %s' % (e, t, tries))
+self.logger.warning(f'Sending AMQP event did not work: {e}. Retrying try {t} out of {tries}')
 else:
-self.logger.error('Could not send out AMQP event for %s tries, aborting.' % tries)
+self.logger.error(f'Could not send out AMQP event for {tries} tries, aborting.')

 def publish(self, project, force=False):
 self.setup(project)
@@ -179,7 +179,7 @@ class ToTestPublisher(ToTestManager):
 current_snapshot = self.get_status('testing')

 if self.get_status('publishing') == current_snapshot:
-self.logger.info('{} is already publishing'.format(current_snapshot))
+self.logger.info(f'{current_snapshot} is already publishing')
 # migrating - if there is no published entry, the last publish call
 # didn't wait for publish - and as such didn't set published state
 if self.get_status('published') != current_snapshot:
@@ -189,8 +189,8 @@ class ToTestPublisher(ToTestManager):
 current_result = self.overall_result(current_snapshot)
 current_qa_version = self.current_qa_version()

-self.logger.info('current_snapshot {}: {}'.format(current_snapshot, str(current_result)))
-self.logger.debug('current_qa_version {}'.format(current_qa_version))
+self.logger.info(f'current_snapshot {current_snapshot}: {str(current_result)}')
+self.logger.debug(f'current_qa_version {current_qa_version}')

 self.send_amqp_event(current_snapshot, current_result)
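One nit the conversion above carries over: the explicit str() inside the new f-string is redundant, since f-string substitution already applies format(), which falls back to str(). Writing {current_result} directly would behave the same, and {current_result!s} is available when the conversion should be spelled out. A quick illustration with a minimal stand-in for the result object:

    class QAResult:
        # Hypothetical stand-in for the project's result type, for illustration.
        def __str__(self):
            return 'passed'

    current_result = QAResult()

    # All three render identically; the explicit str() call adds nothing.
    assert f'{str(current_result)}' == f'{current_result}' == f'{current_result!s}'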
@@ -231,7 +231,7 @@ class ToTestPublisher(ToTestManager):

 current_snapshot = self.get_status('publishing')
 if self.dryrun:
-self.logger.info('Publisher finished, updating published snapshot to {}'.format(current_snapshot))
+self.logger.info(f'Publisher finished, updating published snapshot to {current_snapshot}')
 return

 self.update_status('published', current_snapshot)
@@ -263,8 +263,8 @@ class ToTestPublisher(ToTestManager):
 return

 status_flag = 'published'
-data = {'text': 'tag:{}:{}:{}'.format(snapshot, status_flag, status_flag)}
-self.openqa.openqa_request('POST', 'groups/%s/comments' % group_id, data=data)
+data = {'text': f'tag:{snapshot}:{status_flag}:{status_flag}'}
+self.openqa.openqa_request('POST', f'groups/{group_id}/comments', data=data)

 def openqa_group_id(self):
 url = makeurl(self.project.openqa_server,
@@ -38,7 +38,7 @@ class ToTestReleaser(ToTestManager):
 return None

 if testing_snapshot != self.get_status('failed') and testing_snapshot != self.get_status('published'):
-self.logger.debug('Snapshot {} is still in progress'.format(testing_snapshot))
+self.logger.debug(f'Snapshot {testing_snapshot} is still in progress')
 return QAResult.inprogress

 self.logger.info('testing snapshot %s', testing_snapshot)
@@ -60,7 +60,7 @@ class ToTestReleaser(ToTestManager):

 def release_version(self):
 url = self.api.makeurl(['build', self.project.name, 'standard', self.project.arch,
-'000release-packages:%s-release' % self.project.base])
+f'000release-packages:{self.project.base}-release'])
 f = self.api.retried_GET(url)
 root = ET.parse(f).getroot()
 for binary in root.findall('binary'):
@@ -69,7 +69,7 @@ class ToTestReleaser(ToTestManager):
 if result:
 return result.group(1)

-raise NotFoundException("can't find %s version" % self.project.name)
+raise NotFoundException(f"can't find {self.project.name} version")

 def version_from_project(self):
 if not self.project.take_source_from_product:
@@ -109,11 +109,11 @@ class ToTestReleaser(ToTestManager):

 if any(failed):
 self.logger.info(
-'%s %s %s %s -> %s' % (project, package, repository, arch, failed[0].get('code')))
+f"{project} {package} {repository} {arch} -> {failed[0].get('code')}")
 return False

 if not len(root.findall('result/status[@code="succeeded"]')):
-self.logger.info('No "succeeded" for %s %s %s %s' % (project, package, repository, arch))
+self.logger.info(f'No "succeeded" for {project} {package} {repository} {arch}')
 return False

 maxsize = self.maxsize_for_package(package, arch)
@@ -220,12 +220,12 @@ class ToTestReleaser(ToTestManager):
 return False

 if len(self.project.livecd_products):
-if not self.all_repos_done('%s:Live' % self.project.name):
+if not self.all_repos_done(f'{self.project.name}:Live'):
 return False

 for product in self.project.livecd_products:
 for arch in product.archs:
-if not self.package_ok('%s:Live' % self.project.name, product.package,
+if not self.package_ok(f'{self.project.name}:Live', product.package,
 self.project.product_repo, arch):
 return False
@@ -300,7 +300,7 @@ class ToTestReleaser(ToTestManager):
 snapshot = None
 if snapshot:
 release = self.project.snapshot_number_prefix + snapshot
-self.logger.info('Updating snapshot %s' % snapshot)
+self.logger.info(f'Updating snapshot {snapshot}')
 else:
 release = None
 if not (self.dryrun or self.project.do_not_release):
@@ -53,7 +53,7 @@ class ToTest(object):

 self.jobs_num = 42
 self.load_config(apiurl)
-self.test_project = '%s:%s' % (project, self.test_subproject)
+self.test_project = f'{project}:{self.test_subproject}'

 def load_config(self, apiurl):
 config = yaml.safe_load(attribute_value_load(apiurl, self.name, 'ToTestManagerConfig'))