#!/usr/bin/python
# Copyright (c) 2015,2016 SUSE LLC
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from optparse import OptionParser
from pprint import pformat, pprint
import cmdln
import logging
import os
import re
import sys
import time
from datetime import date
import md5
from simplejson import JSONDecodeError
from collections import namedtuple
try:
from xml.etree import cElementTree as ET
except ImportError:
import cElementTree as ET
import osc.conf
import osc.core
from osclib.comments import CommentAPI
import ReviewBot
from openqa_client.client import OpenQA_Client
from openqa_client import exceptions as openqa_exceptions
Package = namedtuple('Package', ('name', 'version', 'release'))
pkgname_re = re.compile(r'(?P<name>.+)-(?P<version>[^-]+)-(?P<release>[^-]+)\.(?P<arch>[^.]+)\.rpm')
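# Illustrative example (hypothetical filename): 'vim-7.4.326-2.62.1.x86_64.rpm'
# parses as name='vim', version='7.4.326', release='2.62.1', arch='x86_64';
# only name/version/release are kept in the Package tuple.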
# QA Results
QA_UNKNOWN = 0
QA_INPROGRESS = 1
QA_FAILED = 2
QA_PASSED = 3
comment_marker_re = re.compile(r'<!-- openqa state=(?P<state>done|seen)(?: result=(?P<result>accepted|declined))? -->')
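# Matches the state markers that add_comment() below embeds into OBS comments,
# e.g. '<!-- openqa state=seen -->' or '<!-- openqa state=done result=accepted -->'.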
logger = None
request_name_cache = {}
# old stuff, for reference
# def filterchannel(self, apiurl, prj, packages):
# """ filter list of package objects to only include those actually released into prj"""
#
# prefix = 'SUSE:Updates:'
# logger.debug(prj)
# if not prj.startswith(prefix):
# return packages
#
# channel = prj[len(prefix):].replace(':', '_')
#
# url = osc.core.makeurl(apiurl, ('source', 'SUSE:Channels', channel, '_channel'))
# root = ET.parse(osc.core.http_GET(url)).getroot()
#
# package_names = set([p.name for p in packages])
# in_channel = set([p.attrib['name'] for p in root.iter('binary') if p.attrib['name'] in package_names])
#
# return [p for p in packages if p.name in in_channel]
class Update(object):
def __init__(self, settings):
self._settings = settings
self._settings['_NOOBSOLETEBUILD'] = '1'
def settings(self, src_prj, dst_prj, packages, req=None):
return self._settings.copy()
class SUSEUpdate(Update):
# take the first package name we find - often enough correct
def request_name(self, req):
if req.reqid not in request_name_cache:
request_name_cache[req.reqid] = self._request_name(req)
return request_name_cache[req.reqid]
def _request_name(self, req):
for action in req.get_actions('maintenance_release'):
if action.tgt_package.startswith('patchinfo'):
continue
url = osc.core.makeurl(
req.apiurl,
('source', action.src_project, action.src_package, '_link'))
root = ET.parse(osc.core.http_GET(url)).getroot()
if root.attrib.get('cicount'):
continue
return action.tgt_package
return 'unknown'
def settings(self, src_prj, dst_prj, packages, req=None):
settings = super(SUSEUpdate, self).settings(src_prj, dst_prj, packages, req)
# start with a colon so it looks cool behind 'Build' :/
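# e.g. releasing 'vim' via maintenance request 123456 gives BUILD=':123456.vim' (hypothetical ids)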
settings['BUILD'] = ':' + req.reqid + '.' + self.request_name(req)
repo = 'http://download.suse.de/ibs/%s/%s/' % (src_prj.replace(':', ':/'), dst_prj.replace(':', '_'))
settings['MINIMAL_TEST_REPO'] = repo
return settings
class openSUSEUpdate(Update):
def settings(self, src_prj, dst_prj, packages, req=None):
settings = super(openSUSEUpdate, self).settings(src_prj, dst_prj, packages, req)
settings['BUILD'] = src_prj
if req:
settings['BUILD'] += ':' + req.reqid
# openSUSE:Maintenance key
settings['IMPORT_GPG_KEYS'] = 'gpg-pubkey-b3fd7e48-5549fd0f'
settings['ZYPPER_ADD_REPO_PREFIX'] = 'incident'
if packages:
# XXX: this may fail in various ways
# - conflicts between subpackages
# - added packages
# - conflicts with installed packages (e.g sendmail vs postfix)
settings['INSTALL_PACKAGES'] = ' '.join(set([p.name for p in packages]))
settings['VERIFY_PACKAGE_VERSIONS'] = ' '.join(['{} {}-{}'.format(p.name, p.version, p.release) for p in packages])
settings['ZYPPER_ADD_REPOS'] = 'http://download.opensuse.org/repositories/%s/%s/' % (src_prj.replace(':', ':/'), dst_prj.replace(':', '_'))
settings['ADDONURL'] = settings['ZYPPER_ADD_REPOS']
settings['ISO'] = 'openSUSE-Leap-42.1-DVD-x86_64.iso'
settings['WITH_MAIN_REPO'] = 1
settings['WITH_UPDATE_REPO'] = 1
return settings
class TestUpdate(openSUSEUpdate):
def settings(self, src_prj, dst_prj, packages, req=None):
settings = super(TestUpdate, self).settings(src_prj, dst_prj, packages, req)
settings['IMPORT_GPG_KEYS'] = 'testkey'
return settings
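# Aggregate update-test repositories checked by prepare_review()/trigger_build_for_target():
# 'repos' are hashed into REPOHASH, 'settings' seed the scheduled openQA jobs and
# 'test' names one known test used to look up the currently scheduled BUILD.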
TARGET_REPO_SETTINGS = {
'SUSE:Updates:SLE-SERVER:12:x86_64': {
'repos': [
'http://download.suse.de/ibs/SUSE:/Maintenance:/Test:/SLE-SERVER:/12:/x86_64/update',
'http://download.suse.de/ibs/SUSE:/Maintenance:/Test:/SLE-SDK:/12:/x86_64/update/'
],
'settings': {
'DISTRI': 'sle',
'VERSION': '12',
'FLAVOR': 'Server-DVD-UpdateTest',
'ARCH': 'x86_64',
},
'test': 'qam-gnome'
},
'SUSE:Updates:SLE-SERVER:12-SP1:x86_64': {
'repos': [
'http://download.suse.de/ibs/SUSE:/Maintenance:/Test:/SLE-SERVER:/12-SP1:/x86_64/update',
'http://download.suse.de/ibs/SUSE:/Maintenance:/Test:/SLE-SDK:/12-SP1:/x86_64/update/'
],
'settings': {
'DISTRI': 'sle',
'VERSION': '12-SP1',
'FLAVOR': 'Server-DVD-UpdateTest',
'ARCH': 'x86_64'
},
'test': 'qam-gnome'
},
'SUSE:Updates:SLE-DESKTOP:12:x86_64': {
'repos': [
'http://download.suse.de/ibs/SUSE:/Maintenance:/Test:/SLE-DESKTOP:/12:/x86_64/update',
'http://download.suse.de/ibs/SUSE:/Maintenance:/Test:/SLE-SDK:/12:/x86_64/update/'
],
'settings': {
'DISTRI': 'sle',
'VERSION': '12',
'FLAVOR': 'Desktop-DVD-UpdateTest',
'ARCH': 'x86_64',
},
'test': 'qam-gnome'
},
'SUSE:Updates:SLE-DESKTOP:12-SP1:x86_64': {
'repos': [
'http://download.suse.de/ibs/SUSE:/Maintenance:/Test:/SLE-DESKTOP:/12-SP1:/x86_64/update',
'http://download.suse.de/ibs/SUSE:/Maintenance:/Test:/SLE-SDK:/12:/x86_64/update/'
],
'settings': {
'DISTRI': 'sle',
'VERSION': '12-SP1',
'FLAVOR': 'Desktop-DVD-UpdateTest',
'ARCH': 'x86_64',
},
'test': 'qam-gnome'
},
}
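# Per-project openQA settings used by check_action_maintenance_release(): each target
# project maps to one Update object per DISTRI/VERSION/FLAVOR/ARCH combination that is
# posted to the openQA 'isos' route.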
PROJECT_OPENQA_SETTINGS = {
'openSUSE:13.2:Update': [
openSUSEUpdate(
{
'DISTRI': 'opensuse',
'VERSION': '13.2',
'FLAVOR': 'Maintenance',
'ARCH': 'x86_64',
}),
openSUSEUpdate(
{
'DISTRI': 'opensuse',
'VERSION': '13.2',
'FLAVOR': 'Maintenance',
'ARCH': 'i586',
}),
],
'SUSE:Updates:SLE-SERVER:12:x86_64': [
SUSEUpdate(
{
'DISTRI': 'sle',
'VERSION': '12',
'FLAVOR': 'Server-DVD-Incidents',
'ARCH': 'x86_64'
}),
],
'SUSE:Updates:SLE-SERVER:12:ppc64le': [
SUSEUpdate(
{
'DISTRI': 'sle',
'VERSION': '12',
'FLAVOR': 'Server-DVD-Incidents',
'ARCH': 'ppc64le'
}),
],
'SUSE:Updates:SLE-SERVER:12:s390x': [
SUSEUpdate(
{
'DISTRI': 'sle',
'VERSION': '12',
'FLAVOR': 'Server-DVD-Incidents',
'ARCH': 's390x'
}),
],
'SUSE:Updates:SLE-SERVER:12-SP1:x86_64': [
SUSEUpdate(
{
'DISTRI': 'sle',
'VERSION': '12-SP1',
'FLAVOR': 'Server-DVD-Incidents',
'ARCH': 'x86_64'
}),
],
'SUSE:Updates:SLE-SERVER:12-SP1:ppc64le': [
SUSEUpdate(
{
'DISTRI': 'sle',
'VERSION': '12-SP1',
'FLAVOR': 'Server-DVD-Incidents',
'ARCH': 'ppc64le'
}),
],
'SUSE:Updates:SLE-SERVER:12-SP1:s390x': [
SUSEUpdate(
{
'DISTRI': 'sle',
'VERSION': '12-SP1',
'FLAVOR': 'Server-DVD-Incidents',
'ARCH': 's390x'
}),
],
'openSUSE:Leap:42.1:Update': [
openSUSEUpdate(
{
'DISTRI': 'opensuse',
'VERSION': '42.1',
'FLAVOR': 'Maintenance',
'ARCH': 'x86_64',
}),
],
}
class OpenQABot(ReviewBot.ReviewBot):
""" check ABI of library packages
"""
def __init__(self, *args, **kwargs):
self.force = False
self.openqa = None
self.do_comments = True
if 'force' in kwargs:
if kwargs['force'] is True:
self.force = True
del kwargs['force']
if 'openqa' in kwargs:
self.openqa = OpenQA_Client(server=kwargs['openqa'])
del kwargs['openqa']
if 'do_comments' in kwargs:
if kwargs['do_comments'] is not None:
self.do_comments = kwargs['do_comments']
del kwargs['do_comments']
ReviewBot.ReviewBot.__init__(self, *args, **kwargs)
self.logger.debug(self.do_comments)
self.commentapi = CommentAPI(self.apiurl)
self.update_test_builds = dict()
def prepare_review(self):
for prj, u in TARGET_REPO_SETTINGS.items():
self.trigger_build_for_target(prj, u)
def check_action_maintenance_release(self, req, a):
# we only look at the binaries of the patchinfo
if a.src_package != 'patchinfo':
return None
if a.tgt_project not in PROJECT_OPENQA_SETTINGS:
self.logger.warn("not handling %s" % a.tgt_project)
return None
packages = []
# patchinfo collects the binaries and is built for an
# unpredictable architecture, so we need to iterate over all of them
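# e.g. GET /build/SUSE:Maintenance:1234/SUSE_Updates_SLE-SERVER_12_x86_64 lists the
# architectures the patchinfo was built for (hypothetical incident number)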
url = osc.core.makeurl(
self.apiurl,
('build', a.src_project, a.tgt_project.replace(':', '_')))
root = ET.parse(osc.core.http_GET(url)).getroot()
for arch in [n.attrib['name'] for n in root.findall('entry')]:
query = {'nosource': 1}
url = osc.core.makeurl(
self.apiurl,
('build', a.src_project, a.tgt_project.replace(':', '_'), arch, a.src_package),
query=query)
root = ET.parse(osc.core.http_GET(url)).getroot()
for binary in root.findall('binary'):
m = pkgname_re.match(binary.attrib['filename'])
if m:
# can't use arch here as the patchinfo mixes all
# archs
packages.append(Package(m.group('name'), m.group('version'), m.group('release')))
if not packages:
raise Exception("no packages found")
self.logger.debug('found packages %s', ' '.join(set([p.name for p in packages])))
for update in PROJECT_OPENQA_SETTINGS[a.tgt_project]:
settings = update.settings(a.src_project, a.tgt_project, packages, req)
if settings is not None:
self.logger.info("posting %s %s %s", settings['VERSION'], settings['ARCH'], settings['BUILD'])
self.logger.debug('\n'.join([" %s=%s" % i for i in settings.items()]))
if not self.dryrun:
try:
ret = self.openqa.openqa_request('POST', 'isos', data=settings, retries=1)
self.logger.info(pformat(ret))
except JSONDecodeError, e:
self.logger.error(e)
# TODO: record error
except openqa_exceptions.RequestError, e:
self.logger.error(e)
return None
# check a set of repos for their primary checksums
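# The hash covers the <data type="primary"> checksum from each repo's repodata/repomd.xml,
# so it changes whenever a repository's package metadata is republished.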
def calculate_repo_hash(self, repos):
m = md5.new()
# if you want to force it, increase this number
m.update('2')
for url in repos:
url += '/repodata/repomd.xml'
root = ET.parse(osc.core.http_GET(url)).getroot()
cs = root.find('.//{http://linux.duke.edu/metadata/repo}data[@type="primary"]/{http://linux.duke.edu/metadata/repo}checksum')
m.update(cs.text)
return m.hexdigest()
# we don't know the current BUILD and querying all jobs is too expensive
# so we need to check for one known TEST first
# if that job doesn't contain the proper hash, we trigger a new one
# and then we know the build
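# BUILD numbers created here have the form '<YYYYMMDD>-<counter>', e.g. '20160428-1' (illustrative)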
def trigger_build_for_target(self, prj, u):
today = date.today().strftime("%Y%m%d")
repohash = self.calculate_repo_hash(u['repos'])
s = u['settings']
j = self.openqa.openqa_request(
'GET', 'jobs',
{
'distri': s['DISTRI'],
'version': s['VERSION'],
'arch': s['ARCH'],
'flavor': s['FLAVOR'],
'test': u['test'],
'latest': '1',
})['jobs']
buildnr = None
for job in j:
if job['settings'].get('REPOHASH', '') == repohash:
# take the last in the row
buildnr = job['settings']['BUILD']
self.update_test_builds[prj] = buildnr
# ignore old build numbers, we want a fresh run every day
# to find regressions in the tests and to get data about
# randomly failing tests
if buildnr and buildnr.startswith(today):
return
buildnr = 0
# not found, then check for the next free build nr
for job in j:
build = job['settings']['BUILD']
if build and build.startswith(today):
try:
nr = int(build.split('-')[1])
except (IndexError, ValueError):
continue
if nr > buildnr:
buildnr = nr
buildnr = "%s-%d" % (today, buildnr + 1)
# now schedule it for real
s['BUILD'] = buildnr
s['REPOHASH'] = repohash
self.openqa.openqa_request('POST', 'isos', data=s, retries=1)
self.update_test_builds[prj] = buildnr
def check_source_submission(self, src_project, src_package, src_rev, dst_project, dst_package):
ReviewBot.ReviewBot.check_source_submission(self, src_project, src_package, src_rev, dst_project, dst_package)
def request_get_openqa_jobs(self, req):
ret = None
types = set([a.type for a in req.actions])
if 'maintenance_release' in types:
src_prjs = set([a.src_project for a in req.actions])
if len(src_prjs) != 1:
raise Exception("can't handle maintenance_release from different incidents")
build = src_prjs.pop()
tgt_prjs = set([a.tgt_project for a in req.actions])
ret = []
for prj in tgt_prjs:
if prj in PROJECT_OPENQA_SETTINGS:
for u in PROJECT_OPENQA_SETTINGS[prj]:
s = u.settings(build, prj, [], req=req)
ret += self.openqa.openqa_request(
'GET', 'jobs',
{
'distri': s['DISTRI'],
'version': s['VERSION'],
'arch': s['ARCH'],
'flavor': s['FLAVOR'],
'build': s['BUILD'],
'scope': 'relevant',
})['jobs']
if prj in TARGET_REPO_SETTINGS:
u = TARGET_REPO_SETTINGS[prj]
s = u['settings']
ret += self.openqa.openqa_request(
'GET', 'jobs',
{
'distri': s['DISTRI'],
'version': s['VERSION'],
'arch': s['ARCH'],
'flavor': s['FLAVOR'],
'build': self.update_test_builds[prj],
'scope': 'relevant',
})['jobs']
return ret
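# Collapse a list of openQA jobs into a single QA_* state: superseded jobs (clone_id set)
# are ignored; any remaining unfinished job gives QA_INPROGRESS, any finished job that did
# not pass gives QA_FAILED, otherwise QA_PASSED.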
def calculate_qa_status(self, jobs=None):
if not jobs:
return QA_UNKNOWN
j = dict()
has_failed = False
in_progress = False
for job in jobs:
if job['clone_id']:
continue
name = job['name']
if name in j and int(job['id']) < int(j[name]['id']):
continue
j[name] = job
self.logger.debug('job %s in openQA: %s %s %s %s', job['id'], job['settings']['VERSION'], job['settings']['TEST'], job['state'], job['result'])
if job['state'] not in ('cancelled', 'done'):
in_progress = True
else:
if job['result'] != 'passed':
has_failed = True
if not j:
return QA_UNKNOWN
if in_progress:
return QA_INPROGRESS
if has_failed:
return QA_FAILED
return QA_PASSED
def check_publish_enabled(self, project):
url = osc.core.makeurl(self.apiurl, ('source', project, '_meta'))
root = ET.parse(osc.core.http_GET(url)).getroot()
node = root.find('publish')
if node is not None and node.find('disable') is not None:
return False
return True
def add_comment(self, req, msg, state, result=None):
if not self.do_comments:
return
comment = "<!-- openqa state=%s%s -->\n" % (state, ' result=%s' % result if result else '')
comment += "\n" + msg
(comment_id, comment_state, comment_result, comment_text) = self.find_obs_request_comment(req, state)
if comment_id is not None and state == comment_state:
lines_before = len(comment_text.split('\n'))
lines_after = len(comment.split('\n'))
if lines_before == lines_after:
self.logger.debug("not worth the update, previous comment %s is state %s", comment_id, comment_state)
return
self.logger.debug("adding comment to %s, state %s result %s", req.reqid, state, result)
self.logger.debug("message: %s", msg)
if not self.dryrun:
if comment_id is not None:
self.commentapi.delete(comment_id)
self.commentapi.add_comment(request_id=req.reqid, comment=str(comment))
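# The overview URL filters the openQA test list by the scheduled BUILD,
# e.g. <openqa baseurl>/tests?match=:123456.vim (hypothetical request)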
def openqa_overview_url_from_settings(self, settings):
return osc.core.makeurl(self.openqa.baseurl, ['tests'], {'match': settings['BUILD']})
def find_failed_modules(self, job):
failed = []
for module in job['modules']:
if module['result'] != 'failed':
continue
failed.append(module['name'])
return failed
# escape markdown
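# e.g. emd('qam_kernel') returns 'qam\_kernel' so underscores do not start Markdown emphasis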
def emd(self, text):
return text.replace('_', r'\_')
def check_one_request(self, req):
ret = None
# just patch apiurl in to avoid having to pass it around
req.apiurl = self.apiurl
try:
jobs = self.request_get_openqa_jobs(req)
qa_state = self.calculate_qa_status(jobs)
self.logger.debug("request %s state %s", req.reqid, qa_state)
msg = None
if self.force or qa_state == QA_UNKNOWN:
ret = ReviewBot.ReviewBot.check_one_request(self, req)
jobs = self.request_get_openqa_jobs(req)
if self.force:
# make sure to delete previous comments if we're forcing
(comment_id, comment_state, comment_result, comment_text) = self.find_obs_request_comment(req)
if comment_id is not None:
self.logger.debug("deleting old comment %s", comment_id)
if not self.dryrun:
self.commentapi.delete(comment_id)
if not jobs:
msg = "no openQA tests defined"
self.add_comment(req, msg, 'done', 'accepted')
ret = True
else:
url = self.openqa_overview_url_from_settings(jobs[0]['settings'])
self.logger.debug("url %s", url)
msg = "now testing in [openQA](%s)" % url
self.add_comment(req, msg, 'seen')
elif qa_state == QA_FAILED or qa_state == QA_PASSED:
url = self.openqa_overview_url_from_settings(jobs[0]['settings'])
if qa_state == QA_PASSED:
self.logger.debug("request %s passed", req.reqid)
msg = "openQA test [passed](%s)\n" % url
state = 'accepted'
ret = True
else:
self.logger.debug("request %s failed", req.reqid)
msg = "openQA test *[FAILED](%s)*\n" % url
state = 'declined'
ret = False
groups = dict()
for job in jobs:
modules = self.find_failed_modules(job)
if modules != []:
modstrings = []
for mod in modules:
modurl = osc.core.makeurl(self.openqa.baseurl, ['tests', str(job['id'])])
modstrings.append("[%s](%s#step/%s/1)" % (self.emd(mod), modurl, mod))
gl = "%s@%s" % (self.emd(job['group']), self.emd(job['settings']['FLAVOR']))
if gl not in groups:
groupurl = osc.core.makeurl(self.openqa.baseurl, ['tests', 'overview' ],
{ 'version': job['settings']['VERSION'],
'groupid': job['group_id'],
'flavor': job['settings']['FLAVOR'],
'distri': job['settings']['DISTRI'],
'build': job['settings']['BUILD'],
})
gmsg = "__Group [%s](%s)__\n" % (gl, groupurl)
else:
gmsg = groups[gl]
gmsg += '\n- [%s](%s) failed in %s' % (
self.emd(job['settings']['TEST']),
osc.core.makeurl(self.openqa.baseurl, ['tests', str(job['id'])]),
','.join(modstrings))
groups[gl] = gmsg
for group in sorted(groups.keys()):
msg += "\n\n" + groups[group]
self.add_comment(req, msg, 'done', state)
elif qa_state == QA_INPROGRESS:
self.logger.debug("request %s still in progress", req.reqid)
else:
raise Exception("unknown QA state %d", qa_state)
except Exception, e:
import traceback
self.logger.error("unhandled exception in openQA Bot")
self.logger.error(traceback.format_exc())
ret = None
return ret
def find_obs_request_comment(self, req, state=None):
"""Return previous comments (should be one)."""
if self.do_comments:
comments = self.commentapi.get_comments(request_id=req.reqid)
for c in comments.values():
m = comment_marker_re.match(c['comment'])
if m and (state is None or state == m.group('state')):
return c['id'], m.group('state'), m.group('result'), c['comment']
return None, None, None, None
class CommandLineInterface(ReviewBot.CommandLineInterface):
def __init__(self, *args, **kwargs):
ReviewBot.CommandLineInterface.__init__(self, *args, **kwargs)
def get_optparser(self):
parser = ReviewBot.CommandLineInterface.get_optparser(self)
parser.add_option("--force", action="store_true", help="recheck requests that are already considered done")
parser.add_option("--no-comment", dest='comment', action="store_false", help="don't actually post comments to obs")
parser.add_option("--openqa", metavar='HOST', help="openqa api host")
return parser
def setup_checker(self):
apiurl = osc.conf.config['apiurl']
if apiurl is None:
raise osc.oscerr.WrongArgs("missing apiurl")
user = self.options.user
group = self.options.group
if user is None and group is None:
user = osc.conf.get_apiurl_usr(apiurl)
if not self.options.openqa:
raise osc.oscerr.WrongArgs("missing openqa url")
global logger
logger = self.logger
return OpenQABot(
apiurl=apiurl,
dryrun=self.options.dry,
user=user,
group=group,
do_comments=self.options.comment,
openqa=self.options.openqa,
force=self.options.force,
logger=self.logger)
if __name__ == "__main__":
app = CommandLineInterface()
sys.exit(app.main())
# vim: sw=4 et