#!/usr/bin/python
# Copyright (c) 2015-2017 SUSE LLC
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

import io
import os.path as opa
import re
import sys
from datetime import date
import md5
import cmdln

import simplejson as json
from simplejson import JSONDecodeError

import logging
import requests
from pprint import pformat
try:
    from xml.etree import cElementTree as ET
except ImportError:
    import cElementTree as ET

from collections import namedtuple
import gzip

import osc.conf
import osc.core
from pprint import pprint
from osclib.comments import CommentAPI
from osclib.memoize import memoize

import ReviewBot

from openqa_client.client import OpenQA_Client
from openqa_client import exceptions as openqa_exceptions

Package = namedtuple('Package', ('name', 'version', 'release'))

pkgname_re = re.compile(r'(?P<name>.+)-(?P<version>[^-]+)-(?P<release>[^-]+)\.(?P<arch>[^.]+)\.rpm')
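# Illustrative example (hypothetical file name): 'libzypp-16.15.3-27.1.x86_64.rpm'
# is split by pkgname_re into name='libzypp', version='16.15.3', release='27.1',
# arch='x86_64'; only name/version/release end up in the Package tuple.
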
# QA Results
QA_UNKNOWN = 0
QA_INPROGRESS = 1
QA_FAILED = 2
QA_PASSED = 3

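# Marker parsed from previous bot comments on OBS, e.g. (illustrative values):
#   <!-- openqa state=done result=accepted revision=42 -->
# 'result' and 'revision' are optional; see add_comment() and test_job() below.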
comment_marker_re = re.compile(
    r'<!-- openqa state=(?P<state>done|seen)(?: result=(?P<result>accepted|declined|none))?(?: revision=(?P<revision>\d+))? -->')

logger = None

incident_name_cache = {}

# old stuff, for reference
# def filterchannel(self, apiurl, prj, packages):
#     """ filter list of package objects to only include those actually released into prj"""
#
#     prefix = 'SUSE:Updates:'
#     logger.debug(prj)
#     if not prj.startswith(prefix):
#         return packages
#
#     channel = prj[len(prefix):].replace(':', '_')
#
#     url = osc.core.makeurl(apiurl, ('source', 'SUSE:Channels', channel, '_channel'))
#     root = ET.parse(osc.core.http_GET(url)).getroot()
#
#     package_names = set([p.name for p in packages])
#     in_channel = set([p.attrib['name'] for p in root.iter('binary') if p.attrib['name'] in package_names])
#
#     return [p for p in packages if p.name in in_channel]

data_path = opa.abspath(opa.dirname(sys.argv[0]))

with open(opa.join(data_path, "data/kgraft.json"), 'r') as f:
    KGRAFT_SETTINGS = json.load(f)

with open(opa.join(data_path, "data/repos.json"), 'r') as f:
    TARGET_REPO_SETTINGS = json.load(f)

with open(opa.join(data_path, "data/apimap.json"), 'r') as f:
    API_MAP = json.load(f)

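# Package names that should additionally be tested on minimal systems; fetched from
# the qa-maintenance metadata repo, skipping empty, comment and indented lines.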
MINIMALS = {
    x.rstrip()
    for x in requests.get(
        'https://gitlab.suse.de/qa-maintenance/metadata/raw/master/packages-to-be-tested-on-minimal-systems').iter_lines()
    if len(x) > 0 and not(x.startswith("#") or x.startswith(' '))}


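# Base class for the per-product openQA scheduling settings loaded from data/incidents.json.
# Subclasses provide repo_prefix() and maintenance_project(); settings() turns an incident
# source project into one or more openQA parameter dicts.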
class Update(object):

    def __init__(self, settings):
        self._settings = settings
        self._settings['_NOOBSOLETEBUILD'] = '1'

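    # The per-channel repomd.xml carries a <revision> value; the highest one across all
    # channels of the incident is used as the incident's openqa_build (see check_product).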
    def get_max_revision(self, job):
        repo = self.repo_prefix() + '/'
        repo += self.maintenance_project().replace(':', ':/')
        repo += ':/%s' % str(job['id'])
        max_revision = 0
        for channel in job['channels']:
            crepo = repo + '/' + channel.replace(':', '_')
            xml = requests.get(crepo + '/repodata/repomd.xml')
            if not xml.ok:
                # if one fails, we skip it and wait
                print crepo, 'has no repodata - waiting'
                return None
            root = ET.fromstring(xml.text)
            rev = root.find('.//{http://linux.duke.edu/metadata/repo}revision')
            rev = int(rev.text)
            if rev > max_revision:
                max_revision = rev
        return max_revision

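    # Illustrative result: for src_prj 'openSUSE:Maintenance:1234' and incident name
    # 'libzypp' (hypothetical values) the BUILD setting becomes ':1234:libzypp' once the
    # incident repo is published.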
    def settings(self, src_prj, dst_prj, packages):
        s = self._settings.copy()

        # start with a colon so it looks cool behind 'Build' :/
        s['BUILD'] = ':' + src_prj.split(':')[-1]
        name = self.incident_name(src_prj)
        repo = dst_prj.replace(':', '_')
        repo = '%s/%s/%s/' % (self.repo_prefix(), src_prj.replace(':', ':/'), repo)
        patch_id = self.patch_id(repo)
        if patch_id:
            s['INCIDENT_REPO'] = repo
            s['INCIDENT_PATCH'] = self.patch_id(repo)
            s['BUILD'] += ':' + name
        return [s]

    @memoize()
    def incident_packages(self, prj):
        packages = []
        for package in osc.core.meta_get_packagelist(self.apiurl, prj):
            if package.startswith('patchinfo'):
                continue
            if package.endswith('SUSE_Channels'):
                continue
            parts = package.split('.')
            # remove target name
            parts.pop()
            packages.append('.'.join(parts))
        return packages

    # grab the updateinfo from the given repo and return its patch's id
    def patch_id(self, repo):
        url = repo + 'repodata/repomd.xml'
        repomd = requests.get(url)
        if not repomd.ok:
            return None
        root = ET.fromstring(repomd.text)

        cs = root.find(
            './/{http://linux.duke.edu/metadata/repo}data[@type="updateinfo"]/{http://linux.duke.edu/metadata/repo}location')
        url = repo + cs.attrib['href']

        # python 3 brings gzip.decompress, but with python 2 we need manual io.BytesIO
        repomd = requests.get(url).content
        with gzip.GzipFile(fileobj=io.BytesIO(repomd)) as f:
            root = ET.fromstring(f.read())
        return root.find('.//id').text

    # take the first package name we find - often enough correct
    def incident_name(self, prj):
        if prj not in incident_name_cache:
            incident_name_cache[prj] = self._incident_name(prj)
        return incident_name_cache[prj]

    def _incident_name(self, prj):
        shortest_pkg = None
        for package in osc.core.meta_get_packagelist(self.apiurl, prj):
            if package.startswith('patchinfo'):
                continue
            if package.endswith('SUSE_Channels'):
                continue
            url = osc.core.makeurl(
                self.apiurl,
                ('source', prj, package, '_link'))
            root = ET.parse(osc.core.http_GET(url)).getroot()
            if root.attrib.get('cicount'):
                continue
            if not shortest_pkg or len(package) < len(shortest_pkg):
                shortest_pkg = package
        if not shortest_pkg:
            shortest_pkg = 'unknown'
        match = re.match(r'^(.*)\.[^\.]*$', shortest_pkg)
        if match:
            return match.group(1)
        return shortest_pkg

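    # BUILD values of the released-updates test runs look like '<YYYYMMDD>-<counter>'
    # (see trigger_build_for_target); the newest fully passing one, e.g. '20170905-2'
    # (illustrative), is exported as LATEST_GOOD_UPDATES_BUILD.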
    def calculate_lastest_good_updates(self, openqa, settings):
        j = openqa.openqa_request(
            'GET', 'jobs',
            {
                'distri': settings['DISTRI'],
                'version': settings['VERSION'],
                'arch': settings['ARCH'],
                'flavor': 'Updates',
                'scope': 'current',
                'limit': 100  # this needs increasing if we ever get *monster* coverage for released updates
            })['jobs']
        # check all publishing jobs per build and reject incomplete builds
        builds = {}
        for job in j:
            if 'PUBLISH_HDD_1' not in job['settings']:
                continue
            if job['result'] == 'passed' or job['result'] == 'softfailed':
                builds.setdefault(job['settings']['BUILD'], 'passed')
            else:
                builds[job['settings']['BUILD']] = 'failed'

        # take the last one passing completely
        lastgood_prefix = 0
        lastgood_suffix = 0
        for build, status in builds.items():
            if status == 'passed':
                try:
                    prefix = int(build.split('-')[0])
                    suffix = int(build.split('-')[1])
                    if prefix > lastgood_prefix:
                        lastgood_prefix = prefix
                        lastgood_suffix = suffix
                    elif prefix == lastgood_prefix and suffix > lastgood_suffix:
                        lastgood_suffix = suffix
                except ValueError:
                    continue

        if lastgood_prefix:
            settings['LATEST_GOOD_UPDATES_BUILD'] = "%d-%d" % (lastgood_prefix, lastgood_suffix)


class SUSEUpdate(Update):

    def repo_prefix(self):
        return 'http://download.suse.de/ibs'

    def maintenance_project(self):
        return 'SUSE:Maintenance'

    # we take requests that have a kgraft-patch package as kgraft patch (surprise!)
    @staticmethod
    def kgraft_target(apiurl, prj):
        target = None
        action = None
        skip = False
        pattern = re.compile(r"kgraft-patch-([^.]+)\.")

        for package in osc.core.meta_get_packagelist(apiurl, prj):
            if package.startswith("kernel-"):
                skip = True
                break
            match = re.match(pattern, package)
            if match:
                target = match.group(1)
        if skip:
            return None

        return target

    @staticmethod
    def parse_kgraft_version(kgraft_target):
        return kgraft_target.lstrip('SLE').split('_')[0]

    @staticmethod
    def kernel_target(req):
        if req:
            for a in req.actions:
                # kernel incidents have kernel-source package (surprise!)
                if a.src_package.startswith('kernel-source'):
                    return True, a
        return None, None

    def add_minimal_settings(self, prj, settings):
        minimal = False
        for pkg in self.incident_packages(prj):
            if pkg in MINIMALS:
                minimal = True
        if not minimal:
            return []

        settings = settings.copy()
        settings['FLAVOR'] += '-Minimal'
        return [settings]

    def add_kernel_settings(self, prj, settings):
        settings = settings.copy()
        # not right now
        return []

        # special handling for kgraft and kernel incidents
        if settings['FLAVOR'] in ('KGraft', 'Server-DVD-Incidents-Kernel'):
            kgraft_target = self.kgraft_target(self.apiurl, src_prj)
            # Server-DVD-Incidents-Kernel handling
            if settings['FLAVOR'] == 'Server-DVD-Incidents-Kernel':
                kernel_target = self.kernel_target(src_prj)
                if kernel_target or kgraft_target:
                    # incident_id as part of BUILD
                    if kgraft_target:
                        incident_id = re.match(r".*:(\d+)$", src_prj).group(1)
                        name = '.kgraft.'
                        settings['KGRAFT'] = '1'
                    else:
                        incident_id = re.match(r".*:(\d+)$", src_prj).group(1)
                        name = '.kernel.'

                    # discard jobs without 'start'
                    settings['start'] = True
                    settings['BUILD'] = ':' + req.reqid + name + incident_id
                    if kgraft_target:
                        settings['VERSION'] = self.parse_kgraft_version(kgraft_target)
        # ignore kgraft patches without defined target
        # they are actually only the base for kgraft
        if settings['FLAVOR'] == 'KGraft' and kgraft_target and kgraft_target in KGRAFT_SETTINGS:
            incident_id = re.match(r".*:(\d+)$", src_prj).group(1)
            settings.update(KGRAFT_SETTINGS[kgraft_target])
            settings['BUILD'] = ':kgraft.' + incident_id
        # TODO settings['MAINT_UPDATE_RRID'] = src_prj + ':' + req.reqid
        return settings

    def settings(self, src_prj, dst_prj, packages):
        settings = super(SUSEUpdate, self).settings(src_prj, dst_prj, packages)
        if not len(settings):
            return []

        settings += self.add_kernel_settings(src_prj, settings[0])
        settings += self.add_minimal_settings(src_prj, settings[0])

        return settings


class openSUSEUpdate(Update):

    def repo_prefix(self):
        return 'http://download.opensuse.org/repositories'

    def maintenance_project(self):
        return 'openSUSE:Maintenance'

    def settings(self, src_prj, dst_prj, packages):
        settings = super(openSUSEUpdate, self).settings(src_prj, dst_prj, packages)
        settings = settings[0]

        # openSUSE:Maintenance key
        settings['IMPORT_GPG_KEYS'] = 'gpg-pubkey-b3fd7e48-5549fd0f'
        settings['ZYPPER_ADD_REPO_PREFIX'] = 'incident'

        if packages:
            # XXX: this may fail in various ways
            # - conflicts between subpackages
            # - added packages
            # - conflicts with installed packages (e.g. sendmail vs postfix)
            settings['INSTALL_PACKAGES'] = ' '.join(set([p.name for p in packages]))
            settings['VERIFY_PACKAGE_VERSIONS'] = ' '.join(
                ['{} {}-{}'.format(p.name, p.version, p.release) for p in packages])

        settings['ZYPPER_ADD_REPOS'] = settings['INCIDENT_REPO']
        settings['ADDONURL'] = settings['INCIDENT_REPO']

        settings['WITH_MAIN_REPO'] = 1
        settings['WITH_UPDATE_REPO'] = 1

        return [settings]


PROJECT_OPENQA_SETTINGS = {}

with open(opa.join(data_path, "data/incidents.json"), 'r') as f:
    for i, j in json.load(f).items():
        if i.startswith('SUSE'):
            PROJECT_OPENQA_SETTINGS[i] = SUSEUpdate(j)
        elif i.startswith('openSUSE'):
            PROJECT_OPENQA_SETTINGS[i] = openSUSEUpdate(j)
        else:
            raise Exception("unknown project type in incidents.json: %s" % i)


class OpenQABot(ReviewBot.ReviewBot):

    """ schedule openQA jobs for maintenance incidents and report the results as reviews
    """

    def __init__(self, *args, **kwargs):
        ReviewBot.ReviewBot.__init__(self, *args, **kwargs)

        self.force = False
        self.openqa = None
        self.commentapi = CommentAPI(self.apiurl)
        self.update_test_builds = dict()
        self.openqa_jobs = dict()

    def gather_test_builds(self):
        for prj, u in TARGET_REPO_SETTINGS[self.openqa.baseurl].items():
            buildnr = 0
            cjob = 0
            for j in self.jobs_for_target(u):
                # avoid going backwards in job ID
                if cjob > int(j['id']):
                    continue
                buildnr = j['settings']['BUILD']
                cjob = int(j['id'])
            self.update_test_builds[prj] = buildnr
            jobs = self.jobs_for_target(u, build=buildnr)
            self.openqa_jobs[prj] = jobs
            if self.calculate_qa_status(jobs) == QA_INPROGRESS:
                self.pending_target_repos.add(prj)

    # reimplementation from baseclass
    def check_requests(self):

        if self.apiurl.endswith('.suse.de'):
            self.check_suse_incidents()

        # first calculate the latest build number for current jobs
        self.pending_target_repos = set()
        self.gather_test_builds()

        started = []
        # then check progress on running incidents
        for req in self.requests:
            jobs = self.request_get_openqa_jobs(req, incident=True, test_repo=True)
            ret = self.calculate_qa_status(jobs)
            if ret != QA_UNKNOWN:
                started.append(req)

        all_requests = self.requests
        self.requests = started
        ReviewBot.ReviewBot.check_requests(self)

        self.requests = all_requests

        skipped_one = False
        # now make sure the jobs are for current repo
        for prj, u in TARGET_REPO_SETTINGS[self.openqa.baseurl].items():
            if prj in self.pending_target_repos:
                skipped_one = True
                continue
            self.trigger_build_for_target(prj, u)

        # do not schedule new incidents unless we finished
        # last wave
        if skipped_one:
            return

        ReviewBot.ReviewBot.check_requests(self)

    def check_action_maintenance_release(self, req, a):
        # we only look at the binaries of the patchinfo
        if a.src_package != 'patchinfo':
            return None

        if a.tgt_project not in PROJECT_OPENQA_SETTINGS:
            self.logger.warn("not handling %s" % a.tgt_project)
            return None

        # TODO - this needs to be moved
        return None

        packages = []
        # patchinfo collects the binaries and is built for an
        # unpredictable architecture so we need to iterate over all
        url = osc.core.makeurl(
            self.apiurl,
            ('build', a.src_project, a.tgt_project.replace(':', '_')))
        root = ET.parse(osc.core.http_GET(url)).getroot()
        for arch in [n.attrib['name'] for n in root.findall('entry')]:
            query = {'nosource': 1}
            url = osc.core.makeurl(
                self.apiurl,
                ('build', a.src_project, a.tgt_project.replace(':', '_'), arch, a.src_package),
                query=query)

            root = ET.parse(osc.core.http_GET(url)).getroot()

            for binary in root.findall('binary'):
                m = pkgname_re.match(binary.attrib['filename'])
                if m:
                    # can't use arch here as the patchinfo mixes all
                    # archs
                    packages.append(Package(m.group('name'), m.group('version'), m.group('release')))

        if not packages:
            raise Exception("no packages found")

        update.calculate_lastest_good_updates(self.openqa, settings)

        return None

    # check a set of repos for their primary checksums
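    # The md5 over all primary checksums changes whenever any of the repos publishes new
    # content; it is stored as REPOHASH on the scheduled jobs so a changed repo triggers
    # a new build (see trigger_build_for_target).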
    @staticmethod
    def calculate_repo_hash(repos):
        m = md5.new()
        # if you want to force it, increase this number
        m.update('b')
        for url in repos:
            url += '/repodata/repomd.xml'
            root = ET.parse(osc.core.http_GET(url)).getroot()
            cs = root.find(
                './/{http://linux.duke.edu/metadata/repo}data[@type="primary"]/{http://linux.duke.edu/metadata/repo}checksum')
            m.update(cs.text)
        return m.hexdigest()

    def is_incident_in_testing(self, incident):
        # hard coded for now as we only run this code for SUSE Maintenance workflow
        project = 'SUSE:Maintenance:%s' % incident

        xpath = "(state/@name='review') and (action/source/@project='%s' and action/@type='maintenance_release')" % (project)
        res = osc.core.search(self.apiurl, request=xpath)['request']
        # return the one and only (or None)
        return res.find('request')

    def calculate_incidents(self, incidents):
        """
        get incident numbers from SUSE:Maintenance:Test project

        returns a list of (openQA variable name, comma-separated incident numbers) tuples
        """
        l_incidents = []
        for kind, prj in incidents.items():
            packages = osc.core.meta_get_packagelist(self.apiurl, prj)
            incidents = []
            # filter out incidents in staging
            for incident in packages:
                # remove patchinfo. prefix
                incident = incident.replace('_', '.').split('.')[1]
                req = self.is_incident_in_testing(incident)
                # without release request it's in staging
                if req is None:
                    continue

                # skip kgraft patches from aggregation
                req_ = osc.core.Request()
                req_.read(req)
                src_prjs = set([a.src_project for a in req_.actions])
                if SUSEUpdate.kgraft_target(self.apiurl, src_prjs.pop()):
                    continue

                incidents.append(incident)

            l_incidents.append((kind + '_TEST_ISSUES', ','.join(incidents)))

        return l_incidents

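    # Query openQA for the jobs of one test repo target; 'scope' and 'latest' narrow the
    # result to the most recent, still relevant job per scenario.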
    def jobs_for_target(self, data, build=None):
        s = data['settings'][0]
        values = {
            'distri': s['DISTRI'],
            'version': s['VERSION'],
            'arch': s['ARCH'],
            'flavor': s['FLAVOR'],
            'scope': 'relevant',
            'latest': '1',
        }
        if build:
            values['build'] = build
        else:
            values['test'] = data['test']
        return self.openqa.openqa_request('GET', 'jobs', values)['jobs']

    # we don't know the current BUILD and querying all jobs is too expensive
    # so we need to check for one known TEST first
    # if that job doesn't contain the proper hash, we trigger a new one
    # and then we know the build
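    # BUILD numbers for test repo runs are '<today>-<counter>', e.g. '20170905-1',
    # '20170905-2', ... (illustrative); a new run is scheduled whenever the current
    # REPOHASH has no build from today yet.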
    def trigger_build_for_target(self, prj, data):
        today = date.today().strftime("%Y%m%d")
        repohash = self.calculate_repo_hash(data['repos'])
        buildnr = None
        j = self.jobs_for_target(data)
        for job in j:
            if job['settings'].get('REPOHASH', '') == repohash:
                # take the last in the row
                buildnr = job['settings']['BUILD']
                self.update_test_builds[prj] = buildnr
        # ignore old build numbers, we want a fresh run every day
        # to find regressions in the tests and to get data about
        # randomly failing tests
        if buildnr and buildnr.startswith(today):
            return

        buildnr = 0

        # not found, then check for the next free build nr
        for job in j:
            build = job['settings']['BUILD']
            if build and build.startswith(today):
                try:
                    nr = int(build.split('-')[1])
                    if nr > buildnr:
                        buildnr = nr
                except BaseException:
                    continue

        buildnr = "%s-%d" % (today, buildnr + 1)

        for s in data['settings']:
            # now schedule it for real
            if 'incidents' in data.keys():
                for x, y in self.calculate_incidents(data['incidents']):
                    s[x] = y
            s['BUILD'] = buildnr
            s['REPOHASH'] = repohash
            self.logger.debug(pformat(s))
            if not self.dryrun:
                try:
                    self.openqa.openqa_request('POST', 'isos', data=s, retries=1)
                except Exception as e:
                    self.logger.debug(e)
        self.update_test_builds[prj] = buildnr

    def check_source_submission(self, src_project, src_package, src_rev, dst_project, dst_package):
        ReviewBot.ReviewBot.check_source_submission(self, src_project, src_package, src_rev, dst_project, dst_package)

    def request_get_openqa_jobs(self, req, incident=True, test_repo=False):
        ret = None
        types = set([a.type for a in req.actions])
        if 'maintenance_release' in types:
            src_prjs = set([a.src_project for a in req.actions])
            if len(src_prjs) != 1:
                raise Exception("can't handle maintenance_release from different incidents")
            build = src_prjs.pop()
            tgt_prjs = set([a.tgt_project for a in req.actions])
            ret = []
            if incident:
                ret += self.openqa_jobs[build]
            for prj in sorted(tgt_prjs):
                repo_settings = TARGET_REPO_SETTINGS.get(self.openqa.baseurl, {})
                if test_repo and prj in repo_settings:
                    repo_jobs = self.openqa_jobs[prj]
                    ret += repo_jobs

        return ret

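    # Reduce a job list to one of the QA_* constants: cloned jobs are ignored, only the
    # newest job per name counts, and anything not passed/softfailed marks the set failed.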
    def calculate_qa_status(self, jobs=None):
        if not jobs:
            return QA_UNKNOWN

        j = dict()
        has_failed = False
        in_progress = False
        for job in jobs:
            if job['clone_id']:
                continue
            name = job['name']
            if name in j and int(job['id']) < int(j[name]['id']):
                continue
            j[name] = job
            #self.logger.debug('job %s in openQA: %s %s %s %s', job['id'], job['settings']['VERSION'], job['settings']['TEST'], job['state'], job['result'])
            if job['state'] not in ('cancelled', 'done'):
                in_progress = True
            else:
                if job['result'] != 'passed' and job['result'] != 'softfailed':
                    has_failed = True

        if not j:
            return QA_UNKNOWN
        if in_progress:
            return QA_INPROGRESS
        if has_failed:
            return QA_FAILED

        return QA_PASSED

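    # The invisible state marker written at the top of the comment is what
    # find_obs_request_comment() matches later via comment_marker_re.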
    def add_comment(self, msg, state, request_id=None, result=None):
        if not self.do_comments:
            return

        comment = "<!-- openqa state=%s%s -->\n" % (state, ' result=%s' % result if result else '')
        comment += "\n" + msg

        info = self.find_obs_request_comment(request_id=request_id)
        comment_id = info.get('id', None)

        if state == info.get('state', 'missing'):
            lines_before = len(info['comment'].split('\n'))
            lines_after = len(comment.split('\n'))
            if lines_before == lines_after:
                self.logger.debug("not worth the update, previous comment %s is state %s", comment_id, info['state'])
                return

        self.logger.debug("adding comment to %s, state %s result %s", request_id, state, result)
        self.logger.debug("message: %s", msg)
        if not self.dryrun:
            if comment_id is not None:
                self.commentapi.delete(comment_id)
            self.commentapi.add_comment(request_id=request_id, comment=str(comment))

    # escape markdown
    @staticmethod
    def emd(str):
        return str.replace('_', '\_')

    def get_step_url(self, testurl, modulename):
        failurl = testurl + '/modules/%s/fails' % modulename
        fails = requests.get(failurl).json()
        failed_step = fails.get('first_failed_step', 1)
        return "[%s](%s#step/%s/%d)" % (self.emd(modulename), testurl, modulename, failed_step)

    def job_test_name(self, job):
        return "%s@%s" % (self.emd(job['settings']['TEST']), self.emd(job['settings']['MACHINE']))

    def summarize_one_openqa_job(self, job):
        testurl = osc.core.makeurl(self.openqa.baseurl, ['tests', str(job['id'])])
        if not job['result'] in ['passed', 'failed', 'softfailed']:
            rstring = job['result']
            if rstring == 'none':
                return None
            return '\n- [%s](%s) is %s' % (self.job_test_name(job), testurl, rstring)

        modstrings = []
        for module in job['modules']:
            if module['result'] != 'failed':
                continue
            modstrings.append(self.get_step_url(testurl, module['name']))

        if len(modstrings):
            return '\n- [%s](%s) failed in %s' % (self.job_test_name(job), testurl, ','.join(modstrings))
        elif job['result'] == 'failed':  # rare case: fail without module fails
            return '\n- [%s](%s) failed' % (self.job_test_name(job), testurl)
        return ''

    def summarize_openqa_jobs(self, jobs):
        groups = dict()
        for job in jobs:
            gl = "%s@%s" % (self.emd(job['group']), self.emd(job['settings']['FLAVOR']))
            if gl not in groups:
                groupurl = osc.core.makeurl(self.openqa.baseurl, ['tests', 'overview'],
                                            {'version': job['settings']['VERSION'],
                                             'groupid': job['group_id'],
                                             'flavor': job['settings']['FLAVOR'],
                                             'distri': job['settings']['DISTRI'],
                                             'build': job['settings']['BUILD'],
                                             })
                groups[gl] = {'title': "__Group [%s](%s)__\n" % (gl, groupurl),
                              'passed': 0, 'unfinished': 0, 'failed': []}

            job_summary = self.summarize_one_openqa_job(job)
            if job_summary is None:
                groups[gl]['unfinished'] = groups[gl]['unfinished'] + 1
                continue
            # None vs ''
            if not len(job_summary):
                groups[gl]['passed'] = groups[gl]['passed'] + 1
                continue
            # if there is something to report, hold the request
            qa_state = QA_FAILED
            gmsg = groups[gl]
            groups[gl]['failed'].append(job_summary)

        msg = ''
        for group in sorted(groups.keys()):
            msg += "\n\n" + groups[group]['title']
            infos = []
            if groups[group]['passed']:
                infos.append("%d tests passed" % groups[group]['passed'])
            if len(groups[group]['failed']):
                infos.append("%d tests failed" % len(groups[group]['failed']))
            if groups[group]['unfinished']:
                infos.append("%d unfinished tests" % groups[group]['unfinished'])
            msg += "(" + ', '.join(infos) + ")\n"
            for fail in groups[group]['failed']:
                msg += fail

        return msg

    def check_one_request(self, req):
        ret = None

        try:
            jobs = self.request_get_openqa_jobs(req)
            qa_state = self.calculate_qa_status(jobs)
            self.logger.debug("request %s state %s", req.reqid, qa_state)
            msg = None
            if self.force or qa_state == QA_UNKNOWN:
                ret = ReviewBot.ReviewBot.check_one_request(self, req)
                jobs = self.request_get_openqa_jobs(req)

                if self.force:
                    # make sure to delete previous comments if we're forcing
                    info = self.find_obs_request_comment(request_id=req.reqid)
                    if 'id' in info:
                        self.logger.debug("deleting old comment %s", info['id'])
                        if not self.dryrun:
                            self.commentapi.delete(info['id'])

                if not jobs:
                    msg = "no openQA tests defined"
                    self.add_comment(msg, 'done', request_id=req.reqid, result='accepted')
                    ret = True
                else:
                    # no notification until the result is done
                    osc.core.change_review_state(self.apiurl, req.reqid, newstate='new',
                                                 by_group=self.review_group, by_user=self.review_user,
                                                 message='now testing in openQA')
            elif qa_state == QA_FAILED or qa_state == QA_PASSED:
                # don't take test repo results into the calculation of total
                # this is for humans to decide which incident broke the test repo
                jobs += self.request_get_openqa_jobs(req, incident=False, test_repo=True)
                if self.calculate_qa_status(jobs) == QA_INPROGRESS:
                    self.logger.debug(
                        "incident tests for request %s are done, but need to wait for test repo", req.reqid)
                    return

                if qa_state == QA_PASSED:
                    msg = "openQA tests passed\n"
                    result = 'accepted'
                    ret = True
                else:
                    msg = "openQA tests problematic\n"
                    result = 'declined'
                    ret = False

                msg += self.summarize_openqa_jobs(jobs)
                self.add_comment(msg, 'done', result=result, request_id=req.reqid)
            elif qa_state == QA_INPROGRESS:
                self.logger.debug("request %s still in progress", req.reqid)
            else:
                raise Exception("unknown QA state %d" % qa_state)

        except Exception:
            import traceback
            self.logger.error("unhandled exception in openQA Bot")
            self.logger.error(traceback.format_exc())
            ret = None

        return ret

    def find_obs_request_comment(self, request_id=None, project_name=None):
        """Return previous comments (should be one)."""
        if self.do_comments:
            comments = self.commentapi.get_comments(request_id=request_id, project_name=project_name)
            for c in comments.values():
                m = comment_marker_re.match(c['comment'])
                if m:
                    return {'id': c['id'], 'state': m.group('state'), 'result': m.group('result'), 'comment': c['comment'], 'revision': m.group('revision')}
        return {}

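    # Build the list of openQA POST payloads for one product prefix from API_MAP: an arch
    # is included when one of the incident's channels matches an issue prefix, and the
    # repo revision from get_max_revision() is appended to BUILD.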
    def check_product(self, job, product_prefix):
        pmap = API_MAP[product_prefix]
        posts = []
        for arch in pmap['archs']:
            need = False
            settings = {'VERSION': pmap['version'], 'ARCH': arch, 'DISTRI': 'sle'}
            issues = pmap.get('issues', {})
            issues['OS_TEST_ISSUES'] = product_prefix
            for key, prefix in issues.items():
                if prefix + arch in job['channels']:
                    settings[key] = str(job['id'])
                    need = True
            if need:
                u = PROJECT_OPENQA_SETTINGS[product_prefix + arch]
                u.apiurl = self.apiurl
                for s in u.settings(u.maintenance_project() + ':' + str(job['id']), product_prefix + arch, []):
                    if job.get('openqa_build') is None:
                        job['openqa_build'] = u.get_max_revision(job)
                    if job.get('openqa_build') is None:
                        return []
                    s['BUILD'] += '.' + str(job['openqa_build'])
                    s.update(settings)
                    posts.append(s)
        return posts

    def incident_openqa_jobs(self, s):
        return self.openqa.openqa_request(
            'GET', 'jobs',
            {
                'distri': s['DISTRI'],
                'version': s['VERSION'],
                'arch': s['ARCH'],
                'flavor': s['FLAVOR'],
                'build': s['BUILD'],
                'scope': 'relevant',
                'latest': '1'
            })['jobs']

    def check_suse_incidents(self):
        for inc in requests.get('https://maintenance.suse.de/api/incident/active/').json():
            # if not inc in ['5219']: continue
            # if not inc.startswith('52'): continue
            print inc
            # continue
            job = requests.get('https://maintenance.suse.de/api/incident/' + inc).json()
            if job['meta']['state'] in ['final', 'gone']:
                continue
            # required in job: project, id, channels
            self.test_job(job['base'])

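    # Handle one incident: compute the openQA POST payloads per product, schedule those not
    # running yet (unless the project comment already records this openqa_build), then
    # summarize all jobs in a comment on the incident project.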
    def test_job(self, job):
        incident_project = str(job['project'])
        comment_info = self.find_obs_request_comment(project_name=incident_project)
        comment_id = comment_info.get('id', None)
        comment_build = str(comment_info.get('revision', ''))

        openqa_posts = []
        for prod in API_MAP.keys():
            openqa_posts += self.check_product(job, prod)
        openqa_jobs = []
        for s in openqa_posts:
            jobs = self.incident_openqa_jobs(s)
            # take the project comment as marker for not posting jobs
            if not len(jobs) and comment_build != str(job['openqa_build']):
                if self.dryrun:
                    print 'WOULD POST', json.dumps(s, sort_keys=True)
                else:
                    ret = self.openqa.openqa_request('POST', 'isos', data=s, retries=1)
                    openqa_jobs += self.incident_openqa_jobs(s)
            else:
                print s, 'got', len(jobs)
                openqa_jobs += jobs
        self.openqa_jobs[incident_project] = openqa_jobs
        if len(openqa_jobs) == 0:
            self.logger.debug("No openqa jobs defined")
            return
        # print openqa_jobs
        msg = self.summarize_openqa_jobs(openqa_jobs)
        state = 'seen'
        result = 'none'
        qa_status = self.calculate_qa_status(openqa_jobs)
        if qa_status == QA_PASSED:
            result = 'accepted'
            state = 'done'
        if qa_status == QA_FAILED:
            result = 'declined'
            state = 'done'
        comment = "<!-- openqa state=%s result=%s revision=%s -->\n" % (state, result, job.get('openqa_build'))
        comment += "\nCC @coolo\n" + msg

        if comment_id and state != 'done':
            self.logger.debug("%s is already commented, wait until done", incident_project)
            return
        if comment_info.get('comment', '') == comment:
            self.logger.debug("%s comment did not change", incident_project)
            return

        self.logger.debug("adding comment to %s, state %s", incident_project, state)
        #self.logger.debug("message: %s", msg)
        if not self.dryrun:
            if comment_id is not None:
                self.commentapi.delete(comment_id)
            self.commentapi.add_comment(project_name=str(incident_project), comment=str(comment))


class CommandLineInterface(ReviewBot.CommandLineInterface):

    def __init__(self, *args, **kwargs):
        ReviewBot.CommandLineInterface.__init__(self, *args, **kwargs)
        self.clazz = OpenQABot

    def get_optparser(self):
        parser = ReviewBot.CommandLineInterface.get_optparser(self)
        parser.add_option("--force", action="store_true", help="recheck requests that are already considered done")
        parser.add_option("--no-comment", dest='comment', action="store_false",
                          default=True, help="don't actually post comments to obs")
        parser.add_option("--openqa", metavar='HOST', help="openqa api host")
        return parser

    def setup_checker(self):
        bot = ReviewBot.CommandLineInterface.setup_checker(self)

        if self.options.force:
            bot.force = True
        bot.do_comments = self.options.comment
        if not self.options.openqa:
            raise osc.oscerr.WrongArgs("missing openqa url")
        bot.openqa = OpenQA_Client(server=self.options.openqa)

        global logger
        logger = self.logger

        return bot


if __name__ == "__main__":
|
2017-01-11 13:35:33 +01:00
|
|
|
requests_log = logging.getLogger("requests.packages.urllib3")
|
|
|
|
requests_log.setLevel(logging.WARNING)
|
|
|
|
requests_log.propagate = False
|
|
|
|
|
2016-03-18 09:49:09 +01:00
|
|
|
app = CommandLineInterface()
|
|
|
|
sys.exit(app.main())
|
|
|
|
|