Merge pull request #1025 from coolo/more_incidents
Major rework of the openqa bot
commit bd162b9dd5

data/apimap.json | 74 (new file)
@@ -0,0 +1,74 @@
+{
+   "SUSE:Updates:SLE-SERVER:12-SP1-LTSS:" : {
+      "flavor" : "Server-DVD-Incidents",
+      "version" : "12-SP1",
+      "archs" : [
+         "x86_64",
+         "s390x",
+         "ppc64le"
+      ]
+   },
+   "SUSE:Updates:SLE-DESKTOP:12-SP2:" : {
+      "issues" : {
+         "SDK_TEST_ISSUES" : "SUSE:Updates:SLE-SDK:12-SP2:"
+      },
+      "version" : "12-SP2",
+      "flavor" : "Desktop-DVD-Incidents",
+      "archs" : [
+         "x86_64"
+      ]
+   },
+   "SUSE:Updates:SLE-SERVER:12-LTSS:" : {
+      "version" : "12",
+      "flavor" : "Server-DVD-Incidents",
+      "archs" : [
+         "x86_64",
+         "s390x",
+         "ppc64le"
+      ]
+   },
+   "SUSE:Updates:SLE-SERVER:12-SP3:" : {
+      "issues" : {
+         "WE_TEST_ISSUES" : "SUSE:Updates:SLE-WE:12-SP3:",
+         "TCM_TEST_ISSUES" : "SUSE:Maintenance:Test:SLE-Module-Toolchain:12:",
+         "HPCM_TEST_ISSUES" : "SUSE:Updates:SLE-Module-HPC:12:",
+         "SDK_TEST_ISSUES" : "SUSE:Updates:SLE-SDK:12-SP3:",
+         "WSM_TEST_ISSUES" : "SUSE:Maintenance:Test:SLE-Module-Web-Scripting:12:"
+      },
+      "flavor" : "Server-DVD-Incidents",
+      "version" : "12-SP3",
+      "archs" : [
+         "x86_64",
+         "s390x",
+         "ppc64le",
+         "aarch64"
+      ]
+   },
+   "SUSE:Updates:SLE-DESKTOP:12-SP3:" : {
+      "version" : "12-SP3",
+      "issues" : {
+         "SDK_TEST_ISSUES" : "SUSE:Updates:SLE-SDK:12-SP3:"
+      },
+      "flavor" : "Desktop-DVD-Incidents",
+      "archs" : [
+         "x86_64"
+      ]
+   },
+   "SUSE:Updates:SLE-SERVER:12-SP2:" : {
+      "version" : "12-SP2",
+      "issues" : {
+         "WE_TEST_ISSUES" : "SUSE:Updates:SLE-WE:12-SP2:",
+         "TCM_TEST_ISSUES" : "SUSE:Maintenance:Test:SLE-Module-Toolchain:12:",
+         "SDK_TEST_ISSUES" : "SUSE:Updates:SLE-SDK:12-SP2:",
+         "HPCM_TEST_ISSUES" : "SUSE:Updates:SLE-Module-HPC:12:",
+         "WSM_TEST_ISSUES" : "SUSE:Maintenance:Test:SLE-Module-Web-Scripting:12:"
+      },
+      "flavor" : "Server-DVD-Incidents",
+      "archs" : [
+         "x86_64",
+         "s390x",
+         "ppc64le",
+         "aarch64"
+      ]
+   }
+}
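Each entry above maps a maintenance update project prefix to the openQA product it should be tested on; check_product() further down in this diff expands one entry into one openQA POST per architecture. The following is a rough, self-contained sketch of that expansion. The incident id and channel list are invented example data, and the real bot takes DISTRI/FLAVOR for the POST from data/incidents.json via PROJECT_OPENQA_SETTINGS; here the flavor is read straight from the map entry only to keep the snippet runnable on its own.

import json

# example apimap.json entry (same shape as the file added above)
API_MAP = {
    "SUSE:Updates:SLE-DESKTOP:12-SP2:": {
        "issues": {"SDK_TEST_ISSUES": "SUSE:Updates:SLE-SDK:12-SP2:"},
        "version": "12-SP2",
        "flavor": "Desktop-DVD-Incidents",
        "archs": ["x86_64"],
    }
}

# hypothetical incident as returned by the maintenance API: id plus channels
incident = {"id": 12345,
            "channels": ["SUSE:Updates:SLE-DESKTOP:12-SP2:x86_64",
                         "SUSE:Updates:SLE-SDK:12-SP2:x86_64"]}

def expand(product_prefix, job):
    """Sketch of the per-arch expansion done by OpenQABot.check_product()."""
    pmap = API_MAP[product_prefix]
    posts = []
    for arch in pmap["archs"]:
        settings = {"VERSION": pmap["version"], "ARCH": arch, "DISTRI": "sle",
                    "FLAVOR": pmap["flavor"]}
        issues = dict(pmap.get("issues", {}), OS_TEST_ISSUES=product_prefix)
        need = False
        for key, prefix in issues.items():
            if prefix + arch in job["channels"]:
                settings[key] = str(job["id"])
                need = True
        if need:
            posts.append(settings)
    return posts

print(json.dumps(expand("SUSE:Updates:SLE-DESKTOP:12-SP2:", incident), indent=2))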
@@ -1,180 +1,125 @@
 {
-    "SUSE:Updates:SLE-Live-Patching:12:x86_64": [
-        {
-            "ARCH": "x86_64",
-            "DISTRI": "sle",
-            "FLAVOR": "KGraft",
-            "VERSION": "12"
-        },
-        {
-            "ARCH": "x86_64",
-            "DISTRI": "sle",
-            "FLAVOR": "Server-DVD-Incidents-Kernel",
-            "VERSION": "12",
-            "KGRAFT": "1"
-        }
-    ],
-    "SUSE:Updates:SLE-Live-Patching:12-SP3:x86_64": [
-        {
-            "ARCH": "x86_64",
-            "DISTRI": "sle",
-            "FLAVOR": "Server-DVD-Incidents-Kernel",
-            "VERSION": "12-SP3",
-            "KGRAFT": "1"
-        }
-    ],
-    "SUSE:Updates:SLE-SERVER:12-LTSS:ppc64le": [
-        {
-            "ARCH": "ppc64le",
-            "DISTRI": "sle",
-            "FLAVOR": "Server-DVD-Incidents",
-            "VERSION": "12"
-        }
-    ],
-    "SUSE:Updates:SLE-SERVER:12-LTSS:s390x": [
-        {
-            "ARCH": "s390x",
-            "DISTRI": "sle",
-            "FLAVOR": "Server-DVD-Incidents",
-            "VERSION": "12"
-        }
-    ],
-    "SUSE:Updates:SLE-SERVER:12-LTSS:x86_64": [
-        {
-            "ARCH": "x86_64",
-            "DISTRI": "sle",
-            "FLAVOR": "Server-DVD-Incidents",
-            "VERSION": "12"
-        },
-        {
-            "ARCH": "x86_64",
-            "DISTRI": "sle",
-            "FLAVOR": "Server-DVD-Incidents-Kernel",
-            "VERSION": "12"
-        }
-    ],
-    "SUSE:Updates:SLE-SERVER:12-SP1-LTSS:ppc64le": [
-        {
-            "ARCH": "ppc64le",
-            "DISTRI": "sle",
-            "FLAVOR": "Server-DVD-Incidents",
-            "VERSION": "12-SP1"
-        }
-    ],
-    "SUSE:Updates:SLE-SERVER:12-SP1-LTSS:s390x": [
-        {
-            "ARCH": "s390x",
-            "DISTRI": "sle",
-            "FLAVOR": "Server-DVD-Incidents",
-            "VERSION": "12-SP1"
-        }
-    ],
-    "SUSE:Updates:SLE-SERVER:12-SP1-LTSS:x86_64": [
-        {
-            "ARCH": "x86_64",
-            "DISTRI": "sle",
-            "FLAVOR": "Server-DVD-Incidents",
-            "VERSION": "12-SP1"
-        },
-        {
-            "ARCH": "x86_64",
-            "DISTRI": "sle",
-            "FLAVOR": "Server-DVD-Incidents-Kernel",
-            "VERSION": "12-SP1"
-        }
-    ],
-    "SUSE:Updates:SLE-SERVER:12-SP2:aarch64": [
-        {
-            "ARCH": "aarch64",
-            "DISTRI": "sle",
-            "FLAVOR": "Server-DVD-Incidents",
-            "VERSION": "12-SP2"
-        }
-    ],
-    "SUSE:Updates:SLE-SERVER:12-SP2:ppc64le": [
-        {
-            "ARCH": "ppc64le",
-            "DISTRI": "sle",
-            "FLAVOR": "Server-DVD-Incidents",
-            "VERSION": "12-SP2"
-        }
-    ],
-    "SUSE:Updates:SLE-SERVER:12-SP2:s390x": [
-        {
-            "ARCH": "s390x",
-            "DISTRI": "sle",
-            "FLAVOR": "Server-DVD-Incidents",
-            "VERSION": "12-SP2"
-        }
-    ],
-    "SUSE:Updates:SLE-SERVER:12-SP2:x86_64": [
-        {
-            "ARCH": "x86_64",
-            "DISTRI": "sle",
-            "FLAVOR": "Server-DVD-Incidents",
-            "VERSION": "12-SP2"
-        },
-        {
-            "ARCH": "x86_64",
-            "DISTRI": "sle",
-            "FLAVOR": "Server-DVD-Incidents-Kernel",
-            "VERSION": "12-SP2"
-        }
-    ],
-    "SUSE:Updates:SLE-SERVER:12-SP3:aarch64": [
-        {
-            "ARCH": "aarch64",
-            "DISTRI": "sle",
-            "FLAVOR": "Server-DVD-Incidents",
-            "VERSION": "12-SP3"
-        }
-    ],
-    "SUSE:Updates:SLE-SERVER:12-SP3:ppc64le": [
-        {
-            "ARCH": "ppc64le",
-            "DISTRI": "sle",
-            "FLAVOR": "Server-DVD-Incidents",
-            "VERSION": "12-SP3"
-        }
-    ],
-    "SUSE:Updates:SLE-SERVER:12-SP3:s390x": [
-        {
-            "ARCH": "s390x",
-            "DISTRI": "sle",
-            "FLAVOR": "Server-DVD-Incidents",
-            "VERSION": "12-SP3"
-        }
-    ],
-    "SUSE:Updates:SLE-SERVER:12-SP3:x86_64": [
-        {
-            "ARCH": "x86_64",
-            "DISTRI": "sle",
-            "FLAVOR": "Server-DVD-Incidents",
-            "VERSION": "12-SP3"
-        },
-        {
-            "ARCH": "x86_64",
-            "DISTRI": "sle",
-            "FLAVOR": "Server-DVD-Incidents-Kernel",
-            "VERSION": "12-SP3"
-        }
-    ],
-    "openSUSE:Leap:42.2:Update": [
-        {
-            "ARCH": "x86_64",
-            "DISTRI": "opensuse",
-            "FLAVOR": "Maintenance",
-            "ISO": "openSUSE-Leap-42.2-DVD-x86_64.iso",
-            "VERSION": "42.2"
-        }
-    ],
-    "openSUSE:Leap:42.3:Update": [
-        {
-            "ARCH": "x86_64",
-            "DISTRI": "opensuse",
-            "FLAVOR": "Maintenance",
-            "ISO": "openSUSE-Leap-42.3-DVD-x86_64.iso",
-            "VERSION": "42.3"
-        }
-    ]
+   "SUSE:Updates:SLE-SERVER:12-LTSS:s390x" : {
+      "DISTRI" : "sle",
+      "ARCH" : "s390x",
+      "FLAVOR" : "Server-DVD-Incidents",
+      "VERSION" : "12"
+   },
+   "SUSE:Updates:SLE-SERVER:12-LTSS:ppc64le" : {
+      "DISTRI" : "sle",
+      "ARCH" : "ppc64le",
+      "FLAVOR" : "Server-DVD-Incidents",
+      "VERSION" : "12"
+   },
+   "SUSE:Updates:SLE-SERVER:12-SP1-LTSS:ppc64le" : {
+      "DISTRI" : "sle",
+      "ARCH" : "ppc64le",
+      "FLAVOR" : "Server-DVD-Incidents",
+      "VERSION" : "12-SP1"
+   },
+   "openSUSE:Leap:42.3:Update" : {
+      "FLAVOR" : "Maintenance",
+      "VERSION" : "42.3",
+      "DISTRI" : "opensuse",
+      "ARCH" : "x86_64",
+      "ISO" : "openSUSE-Leap-42.3-DVD-x86_64.iso"
+   },
+   "SUSE:Updates:SLE-DESKTOP:12-SP2:x86_64" : {
+      "FLAVOR" : "Desktop-DVD-Incidents",
+      "VERSION" : "12-SP2",
+      "DISTRI" : "sle",
+      "ARCH" : "x86_64"
+   },
+   "SUSE:Updates:SLE-DESKTOP:12-SP3:x86_64" : {
+      "FLAVOR" : "Desktop-DVD-Incidents",
+      "VERSION" : "12-SP3",
+      "DISTRI" : "sle",
+      "ARCH" : "x86_64"
+   },
+   "SUSE:Updates:SLE-SERVER:12-SP2:s390x" : {
+      "DISTRI" : "sle",
+      "ARCH" : "s390x",
+      "VERSION" : "12-SP2",
+      "FLAVOR" : "Server-DVD-Incidents"
+   },
+   "SUSE:Updates:SLE-SERVER:12-SP3:s390x" : {
+      "DISTRI" : "sle",
+      "ARCH" : "s390x",
+      "FLAVOR" : "Server-DVD-Incidents",
+      "VERSION" : "12-SP3"
+   },
+   "SUSE:Updates:SLE-Live-Patching:12-SP3:x86_64" : {
+      "DISTRI" : "sle",
+      "ARCH" : "x86_64",
+      "VERSION" : "12-SP3",
+      "FLAVOR" : "Server-DVD-Incidents-Kernel",
+      "KGRAFT" : "1"
+   },
+   "SUSE:Updates:SLE-SERVER:12-SP2:aarch64" : {
+      "DISTRI" : "sle",
+      "ARCH" : "aarch64",
+      "FLAVOR" : "Server-DVD-Incidents",
+      "VERSION" : "12-SP2"
+   },
+   "SUSE:Updates:SLE-SERVER:12-SP3:x86_64" : {
+      "VERSION" : "12-SP3",
+      "FLAVOR" : "Server-DVD-Incidents",
+      "DISTRI" : "sle",
+      "ARCH" : "x86_64"
+   },
+   "SUSE:Updates:SLE-SERVER:12-SP3:ppc64le" : {
+      "VERSION" : "12-SP3",
+      "FLAVOR" : "Server-DVD-Incidents",
+      "ARCH" : "ppc64le",
+      "DISTRI" : "sle"
+   },
+   "SUSE:Updates:SLE-Live-Patching:12:x86_64" : {
+      "FLAVOR" : "KGraft",
+      "VERSION" : "12",
+      "ARCH" : "x86_64",
+      "DISTRI" : "sle"
+   },
+   "SUSE:Updates:SLE-SERVER:12-SP3:aarch64" : {
+      "VERSION" : "12-SP3",
+      "FLAVOR" : "Server-DVD-Incidents",
+      "DISTRI" : "sle",
+      "ARCH" : "aarch64"
+   },
+   "SUSE:Updates:SLE-SERVER:12-SP1-LTSS:x86_64" : {
+      "VERSION" : "12-SP1",
+      "FLAVOR" : "Server-DVD-Incidents",
+      "DISTRI" : "sle",
+      "ARCH" : "x86_64"
+   },
+   "SUSE:Updates:SLE-SERVER:12-SP2:ppc64le" : {
+      "VERSION" : "12-SP2",
+      "FLAVOR" : "Server-DVD-Incidents",
+      "ARCH" : "ppc64le",
+      "DISTRI" : "sle"
+   },
+   "SUSE:Updates:SLE-SERVER:12-SP1-LTSS:s390x" : {
+      "ARCH" : "s390x",
+      "DISTRI" : "sle",
+      "VERSION" : "12-SP1",
+      "FLAVOR" : "Server-DVD-Incidents"
+   },
+   "SUSE:Updates:SLE-SERVER:12-LTSS:x86_64" : {
+      "FLAVOR" : "Server-DVD-Incidents",
+      "VERSION" : "12",
+      "DISTRI" : "sle",
+      "ARCH" : "x86_64"
+   },
+   "openSUSE:Leap:42.2:Update" : {
+      "DISTRI" : "opensuse",
+      "ARCH" : "x86_64",
+      "ISO" : "openSUSE-Leap-42.2-DVD-x86_64.iso",
+      "FLAVOR" : "Maintenance",
+      "VERSION" : "42.2"
+   },
+   "SUSE:Updates:SLE-SERVER:12-SP2:x86_64" : {
+      "VERSION" : "12-SP2",
+      "FLAVOR" : "Server-DVD-Incidents",
+      "ARCH" : "x86_64",
+      "DISTRI" : "sle"
+   }
 }
@@ -20,28 +20,32 @@
 # SOFTWARE.
 
 
+import io
 import os.path as opa
 import re
 import sys
 from datetime import date
 import md5
+import cmdln
 
 import simplejson as json
 from simplejson import JSONDecodeError
 
 import logging
 import requests
-from collections import namedtuple
 from pprint import pformat
 try:
     from xml.etree import cElementTree as ET
 except ImportError:
     import cElementTree as ET
 
+import gzip
+from tempfile import NamedTemporaryFile
 import osc.conf
 import osc.core
+from pprint import pprint
 from osclib.comments import CommentAPI
+from osclib.memoize import memoize
 
 import ReviewBot
 
@@ -58,11 +62,12 @@ QA_INPROGRESS = 1
 QA_FAILED = 2
 QA_PASSED = 3
 
-comment_marker_re = re.compile(r'<!-- openqa state=(?P<state>done|seen)(?: result=(?P<result>accepted|declined))? -->')
+comment_marker_re = re.compile(
+    r'<!-- openqa state=(?P<state>done|seen)(?: result=(?P<result>accepted|declined|none))?(?: revision=(?P<revision>\d+))? -->')
 
 logger = None
 
-request_name_cache = {}
+incident_name_cache = {}
 
 # old stuff, for reference
 # def filterchannel(self, apiurl, prj, packages):
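The widened marker regex is the contract between the bot's comment writers and readers: state and result as before, plus an optional result of "none" and the tested repository revision. A standalone check of what it captures (not part of the patch):

import re

comment_marker_re = re.compile(
    r'<!-- openqa state=(?P<state>done|seen)(?: result=(?P<result>accepted|declined|none))?(?: revision=(?P<revision>\d+))? -->')

m = comment_marker_re.match('<!-- openqa state=seen result=none revision=7 -->')
print(m.groupdict())  # e.g. {'state': 'seen', 'result': 'none', 'revision': '7'}

# revision and result stay optional, so comments written before this rework still match
print(comment_marker_re.match('<!-- openqa state=done result=accepted -->').group('state'))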
@@ -91,6 +96,17 @@ with open(opa.join(data_path, "data/kgraft.json"), 'r') as f:
 with open(opa.join(data_path, "data/repos.json"), 'r') as f:
     TARGET_REPO_SETTINGS = json.load(f)
 
+with open(opa.join(data_path, "data/apimap.json"), 'r') as f:
+    API_MAP = json.load(f)
+
+MINIMALS = set()
+minimals = requests.get(
+    'https://gitlab.suse.de/qa-maintenance/metadata/raw/master/packages-to-be-tested-on-minimal-systems')
+for line in minimals.text.split('\n'):
+    if line.startswith('#') or line.startswith(' ') or len(line) == 0:
+        continue
+    MINIMALS.add(line)
+
 
 class Update(object):
 
@@ -98,116 +114,99 @@ class Update(object):
         self._settings = settings
         self._settings['_NOOBSOLETEBUILD'] = '1'
 
-    def settings(self, src_prj, dst_prj, packages, req):
+    def get_max_revision(self, job):
+        repo = self.repo_prefix() + '/'
+        repo += self.maintenance_project().replace(':', ':/')
+        repo += ':/%s' % str(job['id'])
+        max_revision = 0
+        for channel in job['channels']:
+            crepo = repo + '/' + channel.replace(':', '_')
+            xml = requests.get(crepo + '/repodata/repomd.xml')
+            if not xml.ok:
+                # if one fails, we skip it and wait
+                print crepo, 'has no repodata - waiting'
+                return None
+            root = ET.fromstring(xml.text)
+            rev = root.find('.//{http://linux.duke.edu/metadata/repo}revision')
+            rev = int(rev.text)
+            if rev > max_revision:
+                max_revision = rev
+        return max_revision
+
+    def settings(self, src_prj, dst_prj, packages):
         s = self._settings.copy()
 
         # start with a colon so it looks cool behind 'Build' :/
-        s['BUILD'] = ':' + req.reqid + '.' + self.request_name(req)
-        s['INCIDENT_REPO'] = '%s/%s/%s/' % (self.repo_prefix(), src_prj.replace(':', ':/'), dst_prj.replace(':', '_'))
-
-        return s
-
-    def calculate_lastest_good_updates(self, openqa, settings):
-        # not touching anything by default
-        pass
+        s['BUILD'] = ':' + src_prj.split(':')[-1]
+        name = self.incident_name(src_prj)
+        repo = dst_prj.replace(':', '_')
+        repo = '%s/%s/%s/' % (self.repo_prefix(), src_prj.replace(':', ':/'), repo)
+        patch_id = self.patch_id(repo)
+        if patch_id:
+            s['INCIDENT_REPO'] = repo
+            s['INCIDENT_PATCH'] = self.patch_id(repo)
+        s['BUILD'] += ':' + name
+        return [s]
+
+    @memoize()
+    def incident_packages(self, prj):
+        packages = []
+        for package in osc.core.meta_get_packagelist(self.apiurl, prj):
+            if package.startswith('patchinfo'):
+                continue
+            if package.endswith('SUSE_Channels'):
+                continue
+            parts = package.split('.')
+            # remove target name
+            parts.pop()
+            packages.append('.'.join(parts))
+        return packages
+
+    # grab the updateinfo from the given repo and return its patch's id
+    def patch_id(self, repo):
+        url = repo + 'repodata/repomd.xml'
+        repomd = requests.get(url)
+        if not repomd.ok:
+            return None
+        root = ET.fromstring(repomd.text)
+
+        cs = root.find(
+            './/{http://linux.duke.edu/metadata/repo}data[@type="updateinfo"]/{http://linux.duke.edu/metadata/repo}location')
+        url = repo + cs.attrib['href']
+
+        # python 3 brings gzip.decompress, but with python 2 we need manual io.BytesIO
+        repomd = requests.get(url).content
+        with gzip.GzipFile(fileobj=io.BytesIO(repomd)) as f:
+            root = ET.fromstring(f.read())
+        return root.find('.//id').text
 
     # take the first package name we find - often enough correct
-    def request_name(self, req):
-        if req.reqid not in request_name_cache:
-            request_name_cache[req.reqid] = self._request_name(req)
-        return request_name_cache[req.reqid]
+    def incident_name(self, prj):
+        if prj not in incident_name_cache:
+            incident_name_cache[prj] = self._incident_name(prj)
+        return incident_name_cache[prj]
 
-    def _request_name(self, req):
-        for action in req.get_actions('maintenance_release'):
-            if action.tgt_package.startswith('patchinfo'):
-                continue
+    def _incident_name(self, prj):
+        shortest_pkg = None
+        for package in osc.core.meta_get_packagelist(self.apiurl, prj):
+            if package.startswith('patchinfo'):
+                continue
+            if package.endswith('SUSE_Channels'):
+                continue
             url = osc.core.makeurl(
-                req.apiurl,
-                ('source', action.src_project, action.src_package, '_link'))
+                self.apiurl,
+                ('source', prj, package, '_link'))
             root = ET.parse(osc.core.http_GET(url)).getroot()
             if root.attrib.get('cicount'):
                 continue
-            return action.tgt_package
-        return 'unknown'
-
-
-class SUSEUpdate(Update):
-
-    def repo_prefix(self):
-        return 'http://download.suse.de/ibs'
-
-    # we take requests that have a kgraft-patch package as kgraft patch (suprise!)
-    @staticmethod
-    def kgraft_target(req):
-        target = None
-        action = None
-        skip = False
-        pattern = re.compile(r"kgraft-patch-([^.]+)\.")
-
-        if req:
-            for a in req.actions:
-                if a.src_package.startswith("kernel-"):
-                    skip = True
-                    break
-                match = re.match(pattern, a.src_package)
-                if match:
-                    target = match.group(1)
-                    action = a
-        if skip:
-            return None, None
-
-        return target, action
-
-    @staticmethod
-    def parse_kgraft_version(kgraft_target):
-        return kgraft_target.lstrip('SLE').split('_')[0]
-
-    @staticmethod
-    def kernel_target(req):
-        if req:
-            for a in req.actions:
-                # kernel incidents have kernel-source package (suprise!)
-                if a.src_package.startswith('kernel-source'):
-                    return True, a
-        return None, None
-
-    def settings(self, src_prj, dst_prj, packages, req=None):
-        settings = super(SUSEUpdate, self).settings(src_prj, dst_prj, packages, req)
-
-        # special handling for kgraft and kernel incidents
-        if settings['FLAVOR'] in ('KGraft', 'Server-DVD-Incidents-Kernel'):
-            kgraft_target, action = self.kgraft_target(req)
-            # Server-DVD-Incidents-Incidents handling
-            if settings['FLAVOR'] == 'Server-DVD-Incidents-Kernel':
-                kernel_target, kaction = self.kernel_target(req)
-                if kernel_target or kgraft_target:
-                    # incident_id as part of BUILD
-                    if kgraft_target:
-                        incident_id = re.match(r".*:(\d+)$", action.src_project).group(1)
-                        name = '.kgraft.'
-                        settings['KGRAFT'] = '1'
-                    else:
-                        incident_id = re.match(r".*:(\d+)$", kaction.src_project).group(1)
-                        name = '.kernel.'
-
-                    # discard jobs without 'start'
-                    settings['start'] = True
-                    settings['BUILD'] = ':' + req.reqid + name + incident_id
-                    if kgraft_target:
-                        settings['VERSION'] = self.parse_kgraft_version(kgraft_target)
-            # ignore kgraft patches without defined target
-            # they are actually only the base for kgraft
-            if settings['FLAVOR'] == 'KGraft' and kgraft_target and kgraft_target in KGRAFT_SETTINGS:
-                incident_id = re.match(r".*:(\d+)$", action.src_project).group(1)
-                settings.update(KGRAFT_SETTINGS[kgraft_target])
-                settings['BUILD'] = ':' + req.reqid + '.kgraft.' + incident_id
-                settings['MAINT_UPDATE_RRID'] = action.src_project + ':' + req.reqid
-
-        return settings
-
-
-class openSUSEUpdate(Update):
+            if not shortest_pkg or len(package) < len(shortest_pkg):
+                shortest_pkg = package
+        if not shortest_pkg:
+            shortest_pkg = 'unknown'
+        match = re.match(r'^(.*)\.[^\.]*$', shortest_pkg)
+        if match:
+            return match.group(1)
+        return shortest_pkg
 
     def calculate_lastest_good_updates(self, openqa, settings):
         j = openqa.openqa_request(
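The new patch_id() resolves an incident's patch id from the published repository metadata: repomd.xml points at a gzipped updateinfo.xml, whose <id> element is the patch id. For reference, a Python 3 flavoured sketch of the same lookup, using gzip.decompress as the in-code comment hints; error handling is trimmed, so treat it as an illustration rather than the committed implementation:

import gzip
import requests
from xml.etree import ElementTree as ET

REPOMD_NS = '{http://linux.duke.edu/metadata/repo}'

def patch_id(repo):
    """Return the patch id advertised by the repo's updateinfo, or None."""
    repomd = requests.get(repo + 'repodata/repomd.xml')
    if not repomd.ok:
        return None
    root = ET.fromstring(repomd.text)
    # repomd.xml lists its data files; pick the updateinfo location
    location = root.find(".//%sdata[@type='updateinfo']/%slocation" % (REPOMD_NS, REPOMD_NS))
    updateinfo = requests.get(repo + location.attrib['href']).content
    # python 3: gzip.decompress replaces the io.BytesIO/GzipFile dance
    root = ET.fromstring(gzip.decompress(updateinfo))
    return root.find('.//id').text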
@@ -249,11 +248,117 @@ class openSUSEUpdate(Update):
         if lastgood_prefix:
             settings['LATEST_GOOD_UPDATES_BUILD'] = "%d-%d" % (lastgood_prefix, lastgood_suffix)
 
 
+class SUSEUpdate(Update):
+
+    def repo_prefix(self):
+        return 'http://download.suse.de/ibs'
+
+    def maintenance_project(self):
+        return 'SUSE:Maintenance'
+
+    # we take requests that have a kgraft-patch package as kgraft patch (suprise!)
+    @staticmethod
+    def kgraft_target(apiurl, prj):
+        target = None
+        action = None
+        skip = False
+        pattern = re.compile(r"kgraft-patch-([^.]+)\.")
+
+        for package in osc.core.meta_get_packagelist(apiurl, prj):
+            if package.startswith("kernel-"):
+                skip = True
+                break
+            match = re.match(pattern, package)
+            if match:
+                target = match.group(1)
+        if skip:
+            return None
+
+        return target
+
+    @staticmethod
+    def parse_kgraft_version(kgraft_target):
+        return kgraft_target.lstrip('SLE').split('_')[0]
+
+    @staticmethod
+    def kernel_target(req):
+        if req:
+            for a in req.actions:
+                # kernel incidents have kernel-source package (suprise!)
+                if a.src_package.startswith('kernel-source'):
+                    return True, a
+        return None, None
+
+    def add_minimal_settings(self, prj, settings):
+        minimal = False
+        for pkg in self.incident_packages(prj):
+            if pkg in MINIMALS:
+                minimal = True
+        if not minimal:
+            return []
+
+        settings = settings.copy()
+        settings['FLAVOR'] += '-Minimal'
+        return [settings]
+
+    def add_kernel_settings(self, prj, settings):
+        settings = settings.copy()
+        # not right now
+        return []
+
+        # special handling for kgraft and kernel incidents
+        if settings['FLAVOR'] in ('KGraft', 'Server-DVD-Incidents-Kernel'):
+            kgraft_target = kgraft_target(self.apiurl, src_prj)
+            # Server-DVD-Incidents-Incidents handling
+            if settings['FLAVOR'] == 'Server-DVD-Incidents-Kernel':
+                kernel_target = self.kernel_target(src_prj)
+                if kernel_target or kgraft_target:
+                    # incident_id as part of BUILD
+                    if kgraft_target:
+                        incident_id = re.match(r".*:(\d+)$", src_prj).group(1)
+                        name = '.kgraft.'
+                        settings['KGRAFT'] = '1'
+                    else:
+                        incident_id = re.match(r".*:(\d+)$", src_prj).group(1)
+                        name = '.kernel.'
+
+                    # discard jobs without 'start'
+                    settings['start'] = True
+                    settings['BUILD'] = ':' + req.reqid + name + incident_id
+                    if kgraft_target:
+                        settings['VERSION'] = self.parse_kgraft_version(kgraft_target)
+            # ignore kgraft patches without defined target
+            # they are actually only the base for kgraft
+            if settings['FLAVOR'] == 'KGraft' and kgraft_target and kgraft_target in KGRAFT_SETTINGS:
+                incident_id = re.match(r".*:(\d+)$", src_prj).group(1)
+                settings.update(KGRAFT_SETTINGS[kgraft_target])
+                settings['BUILD'] = ':kgraft.' + incident_id
+                # TODO settings['MAINT_UPDATE_RRID'] = src_prj + ':' + req.reqid
+        return settings
+
+    def settings(self, src_prj, dst_prj, packages):
+        settings = super(SUSEUpdate, self).settings(src_prj, dst_prj, packages)
+        if not len(settings):
+            return []
+
+        settings += self.add_kernel_settings(src_prj, settings[0])
+        settings += self.add_minimal_settings(src_prj, settings[0])
+
+        return settings
+
+
+class openSUSEUpdate(Update):
+
     def repo_prefix(self):
         return 'http://download.opensuse.org/repositories'
 
-    def settings(self, src_prj, dst_prj, packages, req=None):
-        settings = super(openSUSEUpdate, self).settings(src_prj, dst_prj, packages, req)
+    def maintenance_project(self):
+        return 'openSUSE:Maintenance'
+
+    def settings(self, src_prj, dst_prj, packages):
+        settings = super(openSUSEUpdate, self).settings(src_prj, dst_prj, packages)
+        settings = settings[0]
 
         # openSUSE:Maintenance key
         settings['IMPORT_GPG_KEYS'] = 'gpg-pubkey-b3fd7e48-5549fd0f'
@@ -274,17 +379,7 @@ class openSUSEUpdate(Update):
         settings['WITH_MAIN_REPO'] = 1
         settings['WITH_UPDATE_REPO'] = 1
 
-        return settings
-
-
-class TestUpdate(openSUSEUpdate):
-
-    def settings(self, src_prj, dst_prj, packages, req=None):
-        settings = super(TestUpdate, self).settings(src_prj, dst_prj, packages, req)
-
-        settings['IMPORT_GPG_KEYS'] = 'testkey'
-
-        return settings
+        return [settings]
 
 
 PROJECT_OPENQA_SETTINGS = {}
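The reworked kgraft_target() now inspects the package list of the incident project instead of request actions, and parse_kgraft_version() turns the captured target into an openQA VERSION. A quick illustration with a made-up package name of the usual kgraft-patch-<target>.<suffix> shape (the name is hypothetical, only the shape matters):

import re

pattern = re.compile(r"kgraft-patch-([^.]+)\.")

package = 'kgraft-patch-SLE12-SP3_Update_13.SUSE_SLE-12-SP3_Update'  # example name
target = re.match(pattern, package).group(1)
print(target)                              # SLE12-SP3_Update_13
print(target.lstrip('SLE').split('_')[0])  # 12-SP3, what parse_kgraft_version() returns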
@@ -292,11 +387,11 @@ PROJECT_OPENQA_SETTINGS = {}
 with open(opa.join(data_path, "data/incidents.json"), 'r') as f:
     for i, j in json.load(f).items():
         if i.startswith('SUSE'):
-            PROJECT_OPENQA_SETTINGS[i] = [SUSEUpdate(k) for k in j]
+            PROJECT_OPENQA_SETTINGS[i] = SUSEUpdate(j)
         elif i.startswith('openSUSE'):
-            PROJECT_OPENQA_SETTINGS[i] = [openSUSEUpdate(k) for k in j]
+            PROJECT_OPENQA_SETTINGS[i] = openSUSEUpdate(j)
         else:
-            PROJECT_OPENQA_SETTINGS[i] = [TestUpdate(k) for k in j]
+            raise "Unknown openqa", i
 
 
 class OpenQABot(ReviewBot.ReviewBot):
@@ -311,6 +406,7 @@ class OpenQABot(ReviewBot.ReviewBot):
         self.openqa = None
         self.commentapi = CommentAPI(self.apiurl)
         self.update_test_builds = dict()
+        self.openqa_jobs = dict()
 
     def gather_test_builds(self):
         for prj, u in TARGET_REPO_SETTINGS[self.openqa.baseurl].items():
@@ -323,20 +419,24 @@ class OpenQABot(ReviewBot.ReviewBot):
                 buildnr = j['settings']['BUILD']
                 cjob = int(j['id'])
             self.update_test_builds[prj] = buildnr
+            jobs = self.jobs_for_target(u, build=buildnr)
+            self.openqa_jobs[prj] = jobs
+            if self.calculate_qa_status(jobs) == QA_INPROGRESS:
+                self.pending_target_repos.add(prj)
 
     # reimplemention from baseclass
     def check_requests(self):
 
-        # first calculate the latest build number for current jobs
-        self.gather_test_builds()
+        if self.apiurl.endswith('.suse.de'):
+            self.check_suse_incidents()
 
+        # first calculate the latest build number for current jobs
         self.pending_target_repos = set()
+        self.gather_test_builds()
 
         started = []
         # then check progress on running incidents
         for req in self.requests:
-            # just patch apiurl in to avoid having to pass it around
-            req.apiurl = self.apiurl
             jobs = self.request_get_openqa_jobs(req, incident=True, test_repo=True)
             ret = self.calculate_qa_status(jobs)
             if ret != QA_UNKNOWN:
@@ -372,8 +472,10 @@ class OpenQABot(ReviewBot.ReviewBot):
             self.logger.warn("not handling %s" % a.tgt_project)
             return None
 
+        # TODO - this needs to be moved
+        return None
+
         packages = []
-        patch_id = None
         # patchinfo collects the binaries and is build for an
         # unpredictable architecture so we need iterate over all
         url = osc.core.makeurl(
@@ -395,55 +497,11 @@ class OpenQABot(ReviewBot.ReviewBot):
                 # can't use arch here as the patchinfo mixes all
                 # archs
                 packages.append(Package(m.group('name'), m.group('version'), m.group('release')))
-            elif binary.attrib['filename'] == 'updateinfo.xml':
-                url = osc.core.makeurl(
-                    self.apiurl,
-                    ('build', a.src_project, a.tgt_project.replace(':', '_'),
-                     arch,
-                     a.src_package,
-                     'updateinfo.xml'))
-                ui = ET.parse(osc.core.http_GET(url)).getroot()
-                patch_id = ui.find('.//id').text
 
         if not packages:
             raise Exception("no packages found")
 
-        self.logger.debug('found packages %s and patch id %s', ' '.join(set([p.name for p in packages])), patch_id)
-
-        for update in PROJECT_OPENQA_SETTINGS[a.tgt_project]:
-            settings = update.settings(a.src_project, a.tgt_project, packages, req)
-            settings['INCIDENT_PATCH'] = patch_id
-            if settings:
-                # is old style kgraft check if all options correctly set
-                if settings['FLAVOR'] == 'KGraft' and 'VIRSH_GUESTNAME' not in settings:
-                    self.logger.info("build: {!s} hasn't valid values for kgraft".format(settings['BUILD']))
-                    return None
-
-                # don't start KGRAFT job on Server-DVD-Incidents FLAVOR
-                if settings['FLAVOR'] == 'Server-DVD-Incidents':
-                    if settings['BUILD'].split('.')[1].startswith('kgraft-patch'):
-                        return None
-
-                # kernel incidents jobs -- discard all without 'start' = True
-                if settings['FLAVOR'] == 'Server-DVD-Incidents-Kernel':
-                    if 'start' in settings:
-                        del settings['start']
-                    else:
-                        return None
-
-                update.calculate_lastest_good_updates(self.openqa, settings)
-
-                self.logger.info("posting %s %s %s", settings['VERSION'], settings['ARCH'], settings['BUILD'])
-                self.logger.debug('\n'.join(["  %s=%s" % i for i in settings.items()]))
-                if not self.dryrun:
-                    try:
-                        ret = self.openqa.openqa_request('POST', 'isos', data=settings, retries=1)
-                        self.logger.info(pformat(ret))
-                    except JSONDecodeError as e:
-                        self.logger.error(e)
-                        # TODO: record error
-                    except openqa_exceptions.RequestError as e:
-                        self.logger.error(e)
+        update.calculate_lastest_good_updates(self.openqa, settings)
 
         return None
@@ -488,30 +546,34 @@ class OpenQABot(ReviewBot.ReviewBot):
                 if req is None:
                     continue
 
+                # skip kgraft patches from aggregation
                 req_ = osc.core.Request()
                 req_.read(req)
-                kgraft_target, action = SUSEUpdate.kgraft_target(req_)
-                # skip kgraft patches from aggregation
-                if kgraft_target:
+                src_prjs = set([a.src_project for a in req_.actions])
+                if SUSEUpdate.kgraft_target(self.apiurl, src_prjs.pop()):
                     continue
 
                 incidents.append(incident)
 
             l_incidents.append((kind + '_TEST_ISSUES', ','.join(incidents)))
 
         return l_incidents
 
-    def jobs_for_target(self, data):
+    def jobs_for_target(self, data, build=None):
         s = data['settings'][0]
-        return self.openqa.openqa_request(
-            'GET', 'jobs',
-            {
-                'distri': s['DISTRI'],
-                'version': s['VERSION'],
-                'arch': s['ARCH'],
-                'flavor': s['FLAVOR'],
-                'test': data['test'],
-                'latest': '1',
-            })['jobs']
+        values = {
+            'distri': s['DISTRI'],
+            'version': s['VERSION'],
+            'arch': s['ARCH'],
+            'flavor': s['FLAVOR'],
+            'scope': 'relevant',
+            'latest': '1',
+        }
+        if build:
+            values['build'] = build
+        else:
+            values['test'] = data['test']
+        return self.openqa.openqa_request('GET', 'jobs', values)['jobs']
 
     # we don't know the current BUILD and querying all jobs is too expensive
     # so we need to check for one known TEST first
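With the new build parameter, the same query helper serves two callers: gather_test_builds() filters by a BUILD it already knows, while the previous behaviour of probing via one known TEST remains the fallback. A small sketch of the two query dictionaries it ends up sending to openQA (all field values here are placeholders, not taken from the data files):

s = {'DISTRI': 'sle', 'VERSION': '12-SP3', 'ARCH': 'x86_64', 'FLAVOR': 'Server-DVD-Updates'}

base = {'distri': s['DISTRI'], 'version': s['VERSION'], 'arch': s['ARCH'],
        'flavor': s['FLAVOR'], 'scope': 'relevant', 'latest': '1'}

with_build = dict(base, build='20171215-1')     # when the BUILD is already known
with_test = dict(base, test='qam-allpatterns')  # fallback: probe via one known TEST
print(with_build)
print(with_test)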
@@ -576,37 +638,14 @@ class OpenQABot(ReviewBot.ReviewBot):
         build = src_prjs.pop()
         tgt_prjs = set([a.tgt_project for a in req.actions])
         ret = []
-        for prj in tgt_prjs:
-            if incident and prj in PROJECT_OPENQA_SETTINGS:
-                for u in PROJECT_OPENQA_SETTINGS[prj]:
-                    s = u.settings(build, prj, [], req=req)
-                    ret += self.openqa.openqa_request(
-                        'GET', 'jobs',
-                        {
-                            'distri': s['DISTRI'],
-                            'version': s['VERSION'],
-                            'arch': s['ARCH'],
-                            'flavor': s['FLAVOR'],
-                            'build': s['BUILD'],
-                            'scope': 'relevant',
-                        })['jobs']
+        if incident:
+            ret += self.openqa_jobs[build]
+        for prj in sorted(tgt_prjs):
             repo_settings = TARGET_REPO_SETTINGS.get(self.openqa.baseurl, {})
             if test_repo and prj in repo_settings:
-                u = repo_settings[prj]
-                for s in u['settings']:
-                    repo_jobs = self.openqa.openqa_request(
-                        'GET', 'jobs',
-                        {
-                            'distri': s['DISTRI'],
-                            'version': s['VERSION'],
-                            'arch': s['ARCH'],
-                            'flavor': s['FLAVOR'],
-                            'build': self.update_test_builds.get(prj, 'UNKNOWN'),
-                            'scope': 'relevant',
-                        })['jobs']
-                    ret += repo_jobs
-                    if self.calculate_qa_status(repo_jobs) == QA_INPROGRESS:
-                        self.pending_target_repos.add(prj)
+                repo_jobs = self.openqa_jobs[prj]
+                ret += repo_jobs
         return ret
 
     def calculate_qa_status(self, jobs=None):
@@ -639,36 +678,29 @@ class OpenQABot(ReviewBot.ReviewBot):
 
         return QA_PASSED
 
-    def check_publish_enabled(self, project):
-        url = osc.core.makeurl(self.apiurl, ('source', project, '_meta'))
-        root = ET.parse(osc.core.http_GET(url)).getroot()
-        node = root.find('publish')
-        if node is not None and node.find('disable') is not None:
-            return False
-        return True
-
-    def add_comment(self, req, msg, state, result=None):
+    def add_comment(self, msg, state, request_id=None, result=None):
         if not self.do_comments:
             return
 
         comment = "<!-- openqa state=%s%s -->\n" % (state, ' result=%s' % result if result else '')
         comment += "\n" + msg
 
-        (comment_id, comment_state, comment_result, comment_text) = self.find_obs_request_comment(req, state)
+        info = self.find_obs_request_comment(request_id=request_id)
+        comment_id = info.get('id', None)
 
-        if comment_id is not None and state == comment_state:
-            lines_before = len(comment_text.split('\n'))
+        if state == info.get('state', 'missing'):
+            lines_before = len(info['comment'].split('\n'))
             lines_after = len(comment.split('\n'))
             if lines_before == lines_after:
-                self.logger.debug("not worth the update, previous comment %s is state %s", comment_id, comment_state)
+                self.logger.debug("not worth the update, previous comment %s is state %s", comment_id, info['state'])
                 return
 
-        self.logger.debug("adding comment to %s, state %s result %s", req.reqid, state, result)
+        self.logger.debug("adding comment to %s, state %s result %s", request_id, state, result)
         self.logger.debug("message: %s", msg)
         if not self.dryrun:
             if comment_id is not None:
                 self.commentapi.delete(comment_id)
-            self.commentapi.add_comment(request_id=req.reqid, comment=str(comment))
+            self.commentapi.add_comment(request_id=request_id, comment=str(comment))
 
     # escape markdown
     @staticmethod
@@ -687,7 +719,10 @@ class OpenQABot(ReviewBot.ReviewBot):
     def summarize_one_openqa_job(self, job):
         testurl = osc.core.makeurl(self.openqa.baseurl, ['tests', str(job['id'])])
         if not job['result'] in ['passed', 'failed', 'softfailed']:
-            return '\n- [%s](%s) is %s' % (self.job_test_name(job), testurl, job['result'])
+            rstring = job['result']
+            if rstring == 'none':
+                return None
+            return '\n- [%s](%s) is %s' % (self.job_test_name(job), testurl, rstring)
 
         modstrings = []
         for module in job['modules']:
@@ -701,6 +736,50 @@ class OpenQABot(ReviewBot.ReviewBot):
             return '\n- [%s](%s) failed' % (self.job_test_name(job), testurl)
         return ''
 
+    def summarize_openqa_jobs(self, jobs):
+        groups = dict()
+        for job in jobs:
+            gl = "%s@%s" % (self.emd(job['group']), self.emd(job['settings']['FLAVOR']))
+            if gl not in groups:
+                groupurl = osc.core.makeurl(self.openqa.baseurl, ['tests', 'overview'],
+                                            {'version': job['settings']['VERSION'],
+                                             'groupid': job['group_id'],
+                                             'flavor': job['settings']['FLAVOR'],
+                                             'distri': job['settings']['DISTRI'],
+                                             'build': job['settings']['BUILD'],
+                                             })
+                groups[gl] = {'title': "__Group [%s](%s)__\n" % (gl, groupurl),
+                              'passed': 0, 'unfinished': 0, 'failed': []}
+
+            job_summary = self.summarize_one_openqa_job(job)
+            if job_summary is None:
+                groups[gl]['unfinished'] = groups[gl]['unfinished'] + 1
+                continue
+            # None vs ''
+            if not len(job_summary):
+                groups[gl]['passed'] = groups[gl]['passed'] + 1
+                continue
+            # if there is something to report, hold the request
+            qa_state = QA_FAILED
+            gmsg = groups[gl]
+            groups[gl]['failed'].append(job_summary)
+
+        msg = ''
+        for group in sorted(groups.keys()):
+            msg += "\n\n" + groups[group]['title']
+            infos = []
+            if groups[group]['passed']:
+                infos.append("%d tests passed" % groups[group]['passed'])
+            if len(groups[group]['failed']):
+                infos.append("%d tests failed" % len(groups[group]['failed']))
+            if groups[group]['unfinished']:
+                infos.append("%d unfinished tests" % groups[group]['unfinished'])
+            msg += "(" + ', '.join(infos) + ")\n"
+            for fail in groups[group]['failed']:
+                msg += fail
+
+        return msg
+
     def check_one_request(self, req):
         ret = None
 
@@ -715,19 +794,19 @@ class OpenQABot(ReviewBot.ReviewBot):
 
             if self.force:
                 # make sure to delete previous comments if we're forcing
-                (comment_id, comment_state, comment_result, comment_text) = self.find_obs_request_comment(req)
-                if comment_id is not None:
-                    self.logger.debug("deleting old comment %s", comment_id)
+                info = self.find_obs_request_comment(request_id=req.reqid)
+                if 'id' in info:
+                    self.logger.debug("deleting old comment %s", info['id'])
                     if not self.dryrun:
-                        self.commentapi.delete(comment_id)
+                        self.commentapi.delete(info['id'])
 
             if not jobs:
                 msg = "no openQA tests defined"
-                self.add_comment(req, msg, 'done', 'accepted')
+                self.add_comment(msg, 'done', request_id=req.reqid, result='accepted')
                 ret = True
             else:
                 # no notification until the result is done
-                osc.core.change_review_state(req.apiurl, req.reqid, newstate='new',
+                osc.core.change_review_state(self.apiurl, req.reqid, newstate='new',
                                              by_group=self.review_group, by_user=self.review_user,
                                              message='now testing in openQA')
         elif qa_state == QA_FAILED or qa_state == QA_PASSED:
@@ -738,47 +817,17 @@ class OpenQABot(ReviewBot.ReviewBot):
                 self.logger.debug(
                     "incident tests for request %s are done, but need to wait for test repo", req.reqid)
                 return
-            groups = dict()
-            for job in jobs:
-                gl = "%s@%s" % (self.emd(job['group']), self.emd(job['settings']['FLAVOR']))
-                if gl not in groups:
-                    groupurl = osc.core.makeurl(self.openqa.baseurl, ['tests', 'overview'],
-                                                {'version': job['settings']['VERSION'],
-                                                 'groupid': job['group_id'],
-                                                 'flavor': job['settings']['FLAVOR'],
-                                                 'distri': job['settings']['DISTRI'],
-                                                 'build': job['settings']['BUILD'],
-                                                 })
-                    groups[gl] = {'title': "__Group [%s](%s)__\n" % (gl, groupurl),
-                                  'passed': 0, 'failed': []}
-
-                job_summary = self.summarize_one_openqa_job(job)
-                if not len(job_summary):
-                    groups[gl]['passed'] = groups[gl]['passed'] + 1
-                    continue
-                # if there is something to report, hold the request
-                qa_state = QA_FAILED
-                gmsg = groups[gl]
-                groups[gl]['failed'].append(job_summary)
-
             if qa_state == QA_PASSED:
-                self.logger.debug("request %s passed", req.reqid)
                 msg = "openQA tests passed\n"
-                state = 'accepted'
+                result = 'accepted'
                 ret = True
             else:
-                self.logger.debug("request %s failed", req.reqid)
                 msg = "openQA tests problematic\n"
-                state = 'declined'
+                result = 'declined'
                 ret = False
 
-            for group in sorted(groups.keys()):
-                msg += "\n\n" + groups[group]['title']
-                msg += "(%d tests passed, %d failed)\n" % (groups[group]['passed'], len(groups[group]['failed']))
-                for fail in groups[group]['failed']:
-                    msg += fail
-
-            self.add_comment(req, msg, 'done', state)
+            msg += self.summarize_openqa_jobs(jobs)
+            self.add_comment(msg, 'done', result=result, request_id=req.reqid)
         elif qa_state == QA_INPROGRESS:
             self.logger.debug("request %s still in progress", req.reqid)
         else:
@@ -792,15 +841,119 @@ class OpenQABot(ReviewBot.ReviewBot):
 
         return ret
 
-    def find_obs_request_comment(self, req, state=None):
+    def find_obs_request_comment(self, request_id=None, project_name=None):
         """Return previous comments (should be one)."""
         if self.do_comments:
-            comments = self.commentapi.get_comments(request_id=req.reqid)
+            comments = self.commentapi.get_comments(request_id=request_id, project_name=project_name)
             for c in comments.values():
                 m = comment_marker_re.match(c['comment'])
-                if m and (state is None or state == m.group('state')):
-                    return c['id'], m.group('state'), m.group('result'), c['comment']
-        return None, None, None, None
+                if m:
+                    return {'id': c['id'], 'state': m.group('state'), 'result': m.group('result'), 'comment': c['comment'], 'revision': m.group('revision')}
+        return {}
+
+    def check_product(self, job, product_prefix):
+        pmap = API_MAP[product_prefix]
+        posts = []
+        for arch in pmap['archs']:
+            need = False
+            settings = {'VERSION': pmap['version'], 'ARCH': arch, 'DISTRI': 'sle'}
+            issues = pmap.get('issues', {})
+            issues['OS_TEST_ISSUES'] = product_prefix
+            for key, prefix in issues.items():
+                if prefix + arch in job['channels']:
+                    settings[key] = str(job['id'])
+                    need = True
+            if need:
+                u = PROJECT_OPENQA_SETTINGS[product_prefix + arch]
+                u.apiurl = self.apiurl
+                for s in u.settings(u.maintenance_project() + ':' + str(job['id']), product_prefix + arch, []):
+                    if job.get('openqa_build') is None:
+                        job['openqa_build'] = u.get_max_revision(job)
+                    if job.get('openqa_build') is None:
+                        return []
+                    s['BUILD'] += '.' + str(job['openqa_build'])
+                    s.update(settings)
+                    posts.append(s)
+        return posts
+
+    def incident_openqa_jobs(self, s):
+        return self.openqa.openqa_request(
+            'GET', 'jobs',
+            {
+                'distri': s['DISTRI'],
+                'version': s['VERSION'],
+                'arch': s['ARCH'],
+                'flavor': s['FLAVOR'],
+                'build': s['BUILD'],
+                'scope': 'relevant',
+                'latest': '1'
+            })['jobs']
+
+    def check_suse_incidents(self):
+        for inc in requests.get('https://maintenance.suse.de/api/incident/active/').json():
+            # if not inc in ['5219']: continue
+            # if not inc.startswith('52'): continue
+            print inc
+            # continue
+            job = requests.get('https://maintenance.suse.de/api/incident/' + inc).json()
+            if job['meta']['state'] in ['final', 'gone']:
+                continue
+            # required in job: project, id, channels
+            self.test_job(job['base'])
+
+    def test_job(self, job):
+        incident_project = str(job['project'])
+        comment_info = self.find_obs_request_comment(project_name=incident_project)
+        comment_id = comment_info.get('id', None)
+        comment_build = str(comment_info.get('revision', ''))
+
+        openqa_posts = []
+        for prod in API_MAP.keys():
+            openqa_posts += self.check_product(job, prod)
+        openqa_jobs = []
+        for s in openqa_posts:
+            jobs = self.incident_openqa_jobs(s)
+            # take the project comment as marker for not posting jobs
+            if not len(jobs) and comment_build != str(job['openqa_build']):
+                if self.dryrun:
+                    print 'WOULD POST', json.dumps(s, sort_keys=True)
+                else:
+                    ret = self.openqa.openqa_request('POST', 'isos', data=s, retries=1)
+                    openqa_jobs += self.incident_openqa_jobs(s)
+            else:
+                print s, 'got', len(jobs)
+                openqa_jobs += jobs
+        self.openqa_jobs[incident_project] = openqa_jobs
+        if len(openqa_jobs) == 0:
+            self.logger.debug("No openqa jobs defined")
+            return
+        # print openqa_jobs
+        msg = self.summarize_openqa_jobs(openqa_jobs)
+        state = 'seen'
+        result = 'none'
+        qa_status = self.calculate_qa_status(openqa_jobs)
+        if qa_status == QA_PASSED:
+            result = 'accepted'
+            state = 'done'
+        if qa_status == QA_FAILED:
+            result = 'declined'
+            state = 'done'
+        comment = "<!-- openqa state=%s result=%s revision=%s -->\n" % (state, result, job.get('openqa_build'))
+        comment += "\nCC @coolo\n" + msg
+
+        if comment_id and state != 'done':
+            self.logger.debug("%s is already commented, wait until done", incident_project)
+            return
+        if comment_info.get('comment', '') == comment:
+            self.logger.debug("%s comment did not change", incident_project)
+            return
+
+        self.logger.debug("adding comment to %s, state %s", incident_project, state)
+        #self.logger.debug("message: %s", msg)
+        if not self.dryrun:
+            if comment_id is not None:
+                self.commentapi.delete(comment_id)
+            self.commentapi.add_comment(project_name=str(incident_project), comment=str(comment))
 
 
 class CommandLineInterface(ReviewBot.CommandLineInterface):
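test_job() and find_obs_request_comment() close the loop for incidents: the bot stamps the incident project's comment with state, result and the tested repository revision, and on the next run parses the stamp back to decide whether the incident needs reposting. A compact illustration of that round trip (the comment text and revision numbers are invented for the example):

import re

comment_marker_re = re.compile(
    r'<!-- openqa state=(?P<state>done|seen)(?: result=(?P<result>accepted|declined|none))?(?: revision=(?P<revision>\d+))? -->')

# what test_job() writes for a still-running incident tested at repo revision 42
comment = "<!-- openqa state=%s result=%s revision=%s -->\n" % ('seen', 'none', 42)

# what find_obs_request_comment() recovers on the next run
info = comment_marker_re.match(comment).groupdict()
print(info)  # {'state': 'seen', 'result': 'none', 'revision': '42'}

# test_job() compares this against the current openqa_build to detect staleness
print(info['revision'] != str(43))  # True: the repo moved on, so new jobs get posted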