Merge pull request #1148 from mimi1vx/oqamaintbot

Rework openqa-maintenance bot
Stephan Kulow 2018-04-17 13:01:25 +02:00 committed by GitHub
commit b8337f43a7
14 changed files with 1117 additions and 206 deletions


@ -62,6 +62,7 @@ matrix:
- pip install pycurl urlgrabber m2crypto
- pip install -r requirements.txt
- pip install python-coveralls
- pip install nose-exclude
before_script:
# travis-ci/travis-ci#7008: stop services to make room for OBS setup
- sudo service mysql stop
@ -70,7 +71,7 @@ matrix:
# Needs python prefix to use the correct interpreter.
- python ./obs_clone.py --cache --debug --apiurl-target local
script:
- nosetests --with-coverage --cover-package=. --cover-inclusive
- nosetests --with-coverage --cover-package=. --cover-inclusive --exclude-dir=./oqamaint
after_success:
- coveralls


@ -8,16 +8,6 @@
"ppc64le"
]
},
"SUSE:Updates:SLE-DESKTOP:12-SP2:" : {
"issues" : {
"SDK_TEST_ISSUES" : "SUSE:Updates:SLE-SDK:12-SP2:"
},
"version" : "12-SP2",
"flavor" : "Desktop-DVD-Incidents",
"archs" : [
"x86_64"
]
},
"SUSE:Updates:SLE-SERVER:12-LTSS:" : {
"version" : "12",
"flavor" : "Server-DVD-Incidents",
@ -30,10 +20,10 @@
"SUSE:Updates:SLE-SERVER:12-SP3:" : {
"issues" : {
"WE_TEST_ISSUES" : "SUSE:Updates:SLE-WE:12-SP3:",
"TCM_TEST_ISSUES" : "SUSE:Maintenance:Test:SLE-Module-Toolchain:12:",
"TCM_TEST_ISSUES" : "SUSE:Updates:SLE-Module-Toolchain:12:",
"HPCM_TEST_ISSUES" : "SUSE:Updates:SLE-Module-HPC:12:",
"SDK_TEST_ISSUES" : "SUSE:Updates:SLE-SDK:12-SP3:",
"WSM_TEST_ISSUES" : "SUSE:Maintenance:Test:SLE-Module-Web-Scripting:12:"
"WSM_TEST_ISSUES" : "SUSE:Updates:SLE-Module-Web-Scripting:12:"
},
"flavor" : "Server-DVD-Incidents",
"version" : "12-SP3",
@ -54,21 +44,75 @@
"x86_64"
]
},
"SUSE:Updates:SLE-SERVER:12-SP2:" : {
"SUSE:Updates:SLE-SERVER:12-SP2-LTSS:" : {
"version" : "12-SP2",
"issues" : {
"WE_TEST_ISSUES" : "SUSE:Updates:SLE-WE:12-SP2:",
"TCM_TEST_ISSUES" : "SUSE:Maintenance:Test:SLE-Module-Toolchain:12:",
"SDK_TEST_ISSUES" : "SUSE:Updates:SLE-SDK:12-SP2:",
"HPCM_TEST_ISSUES" : "SUSE:Updates:SLE-Module-HPC:12:",
"WSM_TEST_ISSUES" : "SUSE:Maintenance:Test:SLE-Module-Web-Scripting:12:"
},
"flavor" : "Server-DVD-Incidents",
"archs" : [
"x86_64",
"s390x",
"ppc64le",
"aarch64"
"ppc64le"
]
}
},
"SUSE:Updates:SLE-Live-Patching:12:" : {
"version" : "12",
"flavor" : "Server-DVD-Incidents-Kernel",
"archs" : [
"x86_64"
]
},
"SUSE:Updates:SLE-Live-Patching:12-SP3:" : {
"version" : "12-SP3",
"flavor" : "Server-DVD-Incidents-Kernel",
"archs" : [
"x86_64",
"ppc64le"
]
},
"SUSE:Updates:SUSE-CAASP:ALL:" : {
"version" : "2.0",
"flavor" : "CaaSP-DVD-Incidents",
"distri" : "caasp",
"archs" : [
"x86_64"
]
},
"SUSE:Updates:SLE-HA:12-SP3:" : {
"version" : "12-SP3",
"issues" : {
"OS_TEST_ISSUES" : "SUSE:Updates:SLE-SERVER:12-SP3:",
"HA_TEST_ISSUES" : "SUSE:Updates:SLE-HA:12-SP3:"
},
"required_issue" : "HA_TEST_ISSUES",
"flavor" : "Server-DVD-Incidents-HA",
"distri" : "sle",
"archs" : [
"x86_64"
]
},
"SUSE:Updates:SLE-HA:12-SP2:" : {
"version" : "12-SP2",
"issues" : {
"OS_TEST_ISSUES" : "SUSE:Updates:SLE-SERVER:12-SP2:",
"HA_TEST_ISSUES" : "SUSE:Updates:SLE-HA:12-SP2:"
},
"flavor" : "Server-DVD-Incidents-HA",
"required_issue" : "HA_TEST_ISSUES",
"distri" : "sle",
"archs" : [
"x86_64"
]
},
"SUSE:Updates:Storage:5:" : {
"version" : "12-SP3",
"issues" : {
"OS_TEST_ISSUES" : "SUSE:Updates:SLE-SERVER:12-SP3:",
"SES_TEST_ISSUES" : "SUSE:Updates:Storage:5:"
},
"required_issue" : "SES_TEST_ISSUES",
"flavor" : "Server-DVD-SES-Updates",
"distri" : "sle",
"archs" : [
"x86_64"
]
}
}
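
Each entry above is the per-product scheduling map the reworked bot reads: for every listed architecture it decides which *_TEST_ISSUES variables to set and, where "required_issue" is given, only schedules when the incident actually touches that product. A minimal standalone sketch of that mapping, mirroring check_product() from oqamaint/openqabot.py later in this diff, with a made-up incident:

entry = {
    "version": "12-SP3",
    "issues": {
        "OS_TEST_ISSUES": "SUSE:Updates:SLE-SERVER:12-SP3:",
        "HA_TEST_ISSUES": "SUSE:Updates:SLE-HA:12-SP3:",
    },
    "required_issue": "HA_TEST_ISSUES",
    "flavor": "Server-DVD-Incidents-HA",
    "distri": "sle",
    "archs": ["x86_64"],
}

incident = {  # hypothetical incident, not a real one
    "id": 99999,
    "channels": [
        "SUSE:Updates:SLE-SERVER:12-SP3:x86_64",
        "SUSE:Updates:SLE-HA:12-SP3:x86_64",
    ],
}

for arch in entry["archs"]:
    settings = {"VERSION": entry["version"], "ARCH": arch,
                "DISTRI": entry.get("distri", "sle")}
    for key, prefix in entry["issues"].items():
        # a channel is "<issue prefix><arch>"; a match sets the variable to the incident id
        if prefix + arch in incident["channels"]:
            settings[key] = str(incident["id"])
    if entry.get("required_issue") and entry["required_issue"] not in settings:
        continue  # incident does not touch the product this flavor exists for
    print(settings)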


@ -24,19 +24,13 @@
"ARCH" : "x86_64",
"ISO" : "openSUSE-Leap-42.3-DVD-x86_64.iso"
},
"SUSE:Updates:SLE-DESKTOP:12-SP2:x86_64" : {
"FLAVOR" : "Desktop-DVD-Incidents",
"VERSION" : "12-SP2",
"DISTRI" : "sle",
"ARCH" : "x86_64"
},
"SUSE:Updates:SLE-DESKTOP:12-SP3:x86_64" : {
"FLAVOR" : "Desktop-DVD-Incidents",
"VERSION" : "12-SP3",
"DISTRI" : "sle",
"ARCH" : "x86_64"
},
"SUSE:Updates:SLE-SERVER:12-SP2:s390x" : {
"SUSE:Updates:SLE-SERVER:12-SP2-LTSS:s390x" : {
"DISTRI" : "sle",
"ARCH" : "s390x",
"VERSION" : "12-SP2",
@ -55,11 +49,12 @@
"FLAVOR" : "Server-DVD-Incidents-Kernel",
"KGRAFT" : "1"
},
"SUSE:Updates:SLE-SERVER:12-SP2:aarch64" : {
"SUSE:Updates:SLE-Live-Patching:12-SP3:ppc64le" : {
"DISTRI" : "sle",
"ARCH" : "aarch64",
"FLAVOR" : "Server-DVD-Incidents",
"VERSION" : "12-SP2"
"ARCH" : "ppc64le",
"VERSION" : "12-SP3",
"FLAVOR" : "Server-DVD-Incidents-Kernel",
"KGRAFT" : "1"
},
"SUSE:Updates:SLE-SERVER:12-SP3:x86_64" : {
"VERSION" : "12-SP3",
@ -73,8 +68,14 @@
"ARCH" : "ppc64le",
"DISTRI" : "sle"
},
"SUSE:Updates:SLE-Live-Patching:12-SP3:x86_64" : {
"FLAVOR" : "Server-DVD-Incidents-Kernel",
"VERSION" : "12-SP3",
"ARCH" : "x86_64",
"DISTRI" : "sle"
},
"SUSE:Updates:SLE-Live-Patching:12:x86_64" : {
"FLAVOR" : "KGraft",
"FLAVOR" : "Server-DVD-Incidents-Kernel",
"VERSION" : "12",
"ARCH" : "x86_64",
"DISTRI" : "sle"
@ -91,7 +92,7 @@
"DISTRI" : "sle",
"ARCH" : "x86_64"
},
"SUSE:Updates:SLE-SERVER:12-SP2:ppc64le" : {
"SUSE:Updates:SLE-SERVER:12-SP2-LTSS:ppc64le" : {
"VERSION" : "12-SP2",
"FLAVOR" : "Server-DVD-Incidents",
"ARCH" : "ppc64le",
@ -109,10 +110,34 @@
"DISTRI" : "sle",
"ARCH" : "x86_64"
},
"SUSE:Updates:SLE-SERVER:12-SP2:x86_64" : {
"SUSE:Updates:SLE-SERVER:12-SP2-LTSS:x86_64" : {
"VERSION" : "12-SP2",
"FLAVOR" : "Server-DVD-Incidents",
"ARCH" : "x86_64",
"DISTRI" : "sle"
}
},
"SUSE:Updates:SUSE-CAASP:ALL:x86_64": {
"DISTRI" : "caasp",
"FLAVOR" : "CaaSP-DVD-Incidents",
"VERSION" : "2.0",
"ARCH": "x86_64"
},
"SUSE:Updates:Storage:5:x86_64": {
"DISTRI" : "sle",
"FLAVOR" : "Server-DVD-SES-Updates",
"VERSION" : "12-SP3",
"ARCH" : "x86_64"
},
"SUSE:Updates:SLE-HA:12-SP3:x86_64": {
"DISTRI" : "sle",
"FLAVOR" : "Server-DVD-Incidents-HA",
"VERSION" : "12-SP3",
"ARCH" : "x86_64"
},
"SUSE:Updates:SLE-HA:12-SP2:x86_64": {
"DISTRI" : "sle",
"FLAVOR" : "Server-DVD-Incidents-HA",
"VERSION" : "12-SP2",
"ARCH" : "x86_64"
}
}
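
These keys are a product prefix plus architecture; their values become the base DISTRI/VERSION/FLAVOR/ARCH settings of the incident jobs the bot later looks up. A rough standalone sketch of the query parameters built from one such entry (parameter names as in incident_openqa_jobs() in oqamaint/openqabot.py; the BUILD value is invented):

entry = {
    "DISTRI": "sle",
    "FLAVOR": "Server-DVD-Incidents-HA",
    "VERSION": "12-SP3",
    "ARCH": "x86_64",
}

params = {
    "distri": entry["DISTRI"],
    "version": entry["VERSION"],
    "arch": entry["ARCH"],
    "flavor": entry["FLAVOR"],
    "build": ":12345:pacemaker",  # hypothetical ':<incident>:<package>' build
    "scope": "relevant",
    "latest": "1",
}

# The bot sends these through OpenQA_Client as a GET on the 'jobs' route
# (roughly /api/v1/jobs on the configured openQA host) and inspects the result.
print(params)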


@ -1,127 +0,0 @@
{
"SLE12-SP1_Update_10": {
"VIRSH_GUESTNAME": "kGraft1b",
"VIRSH_INSTANCE": 6211,
"WORKER_CLASS": "svirt-perseus"
},
"SLE12-SP1_Update_11": {
"VIRSH_GUESTNAME": "kGraft1c",
"VIRSH_INSTANCE": 6212,
"WORKER_CLASS": "svirt-perseus"
},
"SLE12-SP1_Update_12": {
"VIRSH_GUESTNAME": "kGraft1d",
"VIRSH_INSTANCE": 6213,
"WORKER_CLASS": "svirt-perseus"
},
"SLE12-SP1_Update_13": {
"VIRSH_GUESTNAME": "kGraft1e",
"VIRSH_INSTANCE": 6214,
"WORKER_CLASS": "svirt-pegasus"
},
"SLE12-SP1_Update_14": {
"VIRSH_GUESTNAME": "kGraft1f",
"VIRSH_INSTANCE": 6215,
"WORKER_CLASS": "svirt-pegasus"
},
"SLE12-SP1_Update_5": {
"VIRSH_GUESTNAME": "kGraft16",
"VIRSH_INSTANCE": 6206,
"WORKER_CLASS": "svirt-perseus"
},
"SLE12-SP1_Update_6": {
"VIRSH_GUESTNAME": "kGraft17",
"VIRSH_INSTANCE": 6207,
"WORKER_CLASS": "svirt-perseus"
},
"SLE12-SP1_Update_7": {
"VIRSH_GUESTNAME": "kGraft18",
"VIRSH_INSTANCE": 6208,
"WORKER_CLASS": "svirt-pegasus"
},
"SLE12-SP1_Update_8": {
"VIRSH_GUESTNAME": "kGraft19",
"VIRSH_INSTANCE": 6209,
"WORKER_CLASS": "svirt-pegasus"
},
"SLE12-SP1_Update_9": {
"VIRSH_GUESTNAME": "kGraft1a",
"VIRSH_INSTANCE": 6210,
"WORKER_CLASS": "svirt-pegasus"
},
"SLE12-SP2_Update_0": {
"VIRSH_GUESTNAME": "kGraft20",
"VIRSH_INSTANCE": 6300,
"WORKER_CLASS": "svirt-perseus"
},
"SLE12-SP2_Update_1": {
"VIRSH_GUESTNAME": "kGraft21",
"VIRSH_INSTANCE": 6301,
"WORKER_CLASS": "svirt-perseus"
},
"SLE12-SP2_Update_2": {
"VIRSH_GUESTNAME": "kGraft22",
"VIRSH_INSTANCE": 6302,
"WORKER_CLASS": "svirt-perseus"
},
"SLE12-SP2_Update_3": {
"VIRSH_GUESTNAME": "kGraft23",
"VIRSH_INSTANCE": 6303,
"WORKER_CLASS": "svirt-pegasus"
},
"SLE12-SP2_Update_4": {
"VIRSH_GUESTNAME": "kGraft24",
"VIRSH_INSTANCE": 6304,
"WORKER_CLASS": "svirt-pegasus"
},
"SLE12-SP2_Update_5": {
"VIRSH_GUESTNAME": "kGraft25",
"VIRSH_INSTANCE": 6305,
"WORKER_CLASS": "svirt-pegasus"
},
"SLE12-SP2_Update_6": {
"VIRSH_GUESTNAME": "kGraft26",
"VIRSH_INSTANCE": 6306,
"WORKER_CLASS": "svirt-perseus"
},
"SLE12_Update_13": {
"VIRSH_GUESTNAME": "kGraft0d",
"VIRSH_INSTANCE": 6113,
"WORKER_CLASS": "svirt-pegasus"
},
"SLE12_Update_14": {
"VIRSH_GUESTNAME": "kGraft0e",
"VIRSH_INSTANCE": 6114,
"WORKER_CLASS": "svirt-pegasus"
},
"SLE12_Update_15": {
"VIRSH_GUESTNAME": "kGraft0f",
"VIRSH_INSTANCE": 6115,
"WORKER_CLASS": "svirt-pegasus"
},
"SLE12_Update_16": {
"VIRSH_GUESTNAME": "kGraft0g",
"VIRSH_INSTANCE": 6116,
"WORKER_CLASS": "svirt-pegasus"
},
"SLE12_Update_17": {
"VIRSH_GUESTNAME": "kGraft0h",
"VIRSH_INSTANCE": 6117,
"WORKER_CLASS": "svirt-pegasus"
},
"SLE12_Update_18": {
"VIRSH_GUESTNAME": "kGraft0i",
"VIRSH_INSTANCE": 6118,
"WORKER_CLASS": "svirt-perseus"
},
"SLE12_Update_19": {
"VIRSH_GUESTNAME": "kGraft0j",
"VIRSH_INSTANCE": 6119,
"WORKER_CLASS": "svirt-perseus"
},
"SLE12_Update_20": {
"VIRSH_GUESTNAME": "kGraft0k",
"VIRSH_INSTANCE": 6120,
"WORKER_CLASS": "svirt-pegasus"
}
}


@ -24,27 +24,6 @@
}
},
"https://openqa.suse.de": {
"SUSE:Updates:SLE-DESKTOP:12-SP2:x86_64": {
"incidents": {
"OS": "SUSE:Maintenance:Test:SLE-DESKTOP:12-SP2:x86_64",
"SDK": "SUSE:Maintenance:Test:SLE-SDK:12-SP2:x86_64"
},
"repos": [
"http://download.suse.de/ibs/SUSE/Updates/SLE-DESKTOP/12-SP2/x86_64/update/",
"http://download.suse.de/ibs/SUSE:/Maintenance:/Test:/SLE-DESKTOP:/12-SP2:/x86_64/update/",
"http://download.suse.de/ibs/SUSE:/Maintenance:/Test:/SLE-SDK:/12-SP2:/x86_64/update/",
"http://download.suse.de/ibs/SUSE/Updates/SLE-SDK/12-SP2/x86_64/update/"
],
"settings": [
{
"ARCH": "x86_64",
"DISTRI": "sle",
"FLAVOR": "Desktop-DVD-Updates",
"VERSION": "12-SP2"
}
],
"test": "qam-gnome"
},
"SUSE:Updates:SLE-DESKTOP:12-SP3:x86_64": {
"incidents": {
"OS": "SUSE:Maintenance:Test:SLE-DESKTOP:12-SP3:x86_64",
@ -102,28 +81,13 @@
],
"test": "qam-gnome"
},
"SUSE:Updates:SLE-SERVER:12-SP2:x86_64": {
"SUSE:Updates:SLE-SERVER:12-SP2-LTSS:x86_64": {
"incidents": {
"HPCM": "SUSE:Maintenance:Test:SLE-Module-HPC:12:x86_64",
"OS": "SUSE:Maintenance:Test:SLE-SERVER:12-SP2:x86_64",
"SDK": "SUSE:Maintenance:Test:SLE-SDK:12-SP2:x86_64",
"TCM": "SUSE:Maintenance:Test:SLE-Module-Toolchain:12:x86_64",
"WE": "SUSE:Maintenance:Test:SLE-WE:12-SP2:x86_64",
"WSM": "SUSE:Maintenance:Test:SLE-Module-Web-Scripting:12:x86_64"
"OS": "SUSE:Maintenance:Test:SLE-SERVER:12-SP2-LTSS:x86_64"
},
"repos": [
"http://download.suse.de/ibs/SUSE/Updates/SLE-SERVER/12-SP2/x86_64/update/",
"http://download.suse.de/ibs/SUSE:/Maintenance:/Test:/SLE-SERVER:/12-SP2:/x86_64/update/",
"http://download.suse.de/ibs/SUSE/Updates/SLE-SDK/12-SP2/x86_64/update/",
"http://download.suse.de/ibs/SUSE:/Maintenance:/Test:/SLE-SDK:/12-SP2:/x86_64/update/",
"http://download.suse.de/ibs/SUSE/Updates/SLE-WE/12-SP2/x86_64/update/",
"http://download.suse.de/ibs/SUSE:/Maintenance:/Test:/SLE-WE:/12-SP2:/x86_64/update/",
"http://download.suse.de/ibs/SUSE/Updates/SLE-Module-Web-Scripting/12/x86_64/update/",
"http://download.suse.de/ibs/SUSE:/Maintenance:/Test:/SLE-Module-Web-Scripting:/12:/x86_64/update/",
"http://download.suse.de/ibs/SUSE/Updates/SLE-Module-Toolchain/12/x86_64/update/",
"http://download.suse.de/ibs/SUSE:/Maintenance:/Test:/SLE-Module-Toolchain:/12:/x86_64/update/",
"http://download.suse.de/ibs/SUSE/Updates/SLE-Module-HPC/12/x86_64/update/",
"http://download.suse.de/ibs/SUSE:/Maintenance:/Test:/SLE-Module-HPC:/12:/x86_64/update/"
"http://download.suse.de/ibs/SUSE/Updates/SLE-SERVER/12-SP2-LTSS/x86_64/update/",
"http://download.suse.de/ibs/SUSE:/Maintenance:/Test:/SLE-SERVER:/12-SP2-LTSS:/x86_64/update/"
],
"settings": [
{
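
The "repos" lists above decide whether a new test-repository run is needed: the bot hashes the primary-metadata checksums of all listed repositories and only schedules a build when that hash changes. A simplified standalone version of that checksum (mirroring calculate_repo_hash() from oqamaint/openqabot.py, but using requests instead of osc's HTTP helper; the mirror is SUSE-internal, so this only resolves inside that network):

import hashlib
import requests
from xml.etree import ElementTree as ET

NS = "{http://linux.duke.edu/metadata/repo}"

def repo_hash(repos):
    """md5 over the primary-metadata checksums of the given repositories."""
    m = hashlib.md5()
    m.update(b"b")  # bump this salt to force a fresh openQA build
    for url in repos:
        xml = requests.get(url + "/repodata/repomd.xml").content
        root = ET.fromstring(xml)
        cs = root.find('.//{ns}data[@type="primary"]/{ns}checksum'.format(ns=NS))
        m.update(cs.text.encode("utf-8"))
    return m.hexdigest()

print(repo_hash(["http://download.suse.de/ibs/SUSE/Updates/SLE-SERVER/12-SP2-LTSS/x86_64/update/"]))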

30
openqa-maintenance2.py Executable file

@ -0,0 +1,30 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2015-2017 SUSE LLC
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from oqamaint.cli import CommandLineInterface
import sys
if __name__ == "__main__":
app = CommandLineInterface()
sys.exit(app.main())
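
The new entry point is intentionally thin; all logic lives in oqamaint. Assuming a repository checkout with osc, openqa_client and the symlinked ReviewBot importable, the options added in oqamaint/cli.py can be exercised on their own; a quick sketch (URL and path are placeholders):

from oqamaint.cli import CommandLineInterface

cli = CommandLineInterface()
parser = cli.get_optparser()
opts, args = parser.parse_args([
    "--openqa", "https://openqa.example.org",
    "--data", "/path/to/openSUSE-release-tools",
    "--no-comment",
])
# --no-comment flips the 'comment' flag off; --openqa and --data are stored as given
print("{} {} {}".format(opts.openqa, opts.data, opts.comment))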

1
oqamaint/ReviewBot.py Symbolic link

@ -0,0 +1 @@
../ReviewBot.py

0
oqamaint/__init__.py Normal file

112
oqamaint/cli.py Normal file

@ -0,0 +1,112 @@
# -*- coding: utf-8 -*-
# standard library
import logging
import os.path as opa
import simplejson as json
import sys
# external dependency
from openqa_client.client import OpenQA_Client
# from package itself
import osc
from openqabot import OpenQABot
from opensuse import openSUSEUpdate
import ReviewBot
from suse import SUSEUpdate
class CommandLineInterface(ReviewBot.CommandLineInterface):
def __init__(self, *args, **kwargs):
ReviewBot.CommandLineInterface.__init__(self, *args, **kwargs)
self.clazz = OpenQABot
def get_optparser(self):
parser = ReviewBot.CommandLineInterface.get_optparser(self)
parser.add_option("--force", action="store_true", help="recheck requests that are already considered done")
parser.add_option("--no-comment", dest='comment', action="store_false",
default=True, help="don't actually post comments to obs")
parser.add_option("--openqa", metavar='HOST', help="openqa api host")
parser.add_option(
"--data",
default=opa.abspath(
opa.dirname(
sys.argv[0])),
help="Path to metadata dir (data/*.json)")
return parser
def _load_metadata(self):
path = self.options.data
project = {}
with open(opa.join(path, "data/repos.json"), 'r') as f:
target = json.load(f)
with open(opa.join(path, "data/apimap.json"), 'r') as f:
api = json.load(f)
with open(opa.join(path, "data/incidents.json"), 'r') as f:
for i, j in json.load(f).items():
if i.startswith('SUSE'):
project[i] = SUSEUpdate(j)
elif i.startswith('openSUSE'):
project[i] = openSUSEUpdate(j)
else:
raise "Unknown openQA", i
return project, target, api
def postoptparse(self):
# practically quiet
level = logging.WARNING
if (self.options.debug):
level = logging.DEBUG
elif (self.options.verbose):
# recommended variant
level = logging.INFO
self.logger = logging.getLogger(self.optparser.prog)
self.logger.setLevel(level)
handler = logging.StreamHandler()
formatter = logging.Formatter('%(levelname)-2s: %(message)s')
handler.setFormatter(formatter)
self.logger.addHandler(handler)
osc.conf.get_config(override_apiurl=self.options.apiurl)
if (self.options.osc_debug):
osc.conf.config['debug'] = 1
self.checker = self.setup_checker()
if self.options.config:
self.checker.load_config(self.options.config)
if self.options.review_mode:
self.checker.review_mode = self.options.review_mode
if self.options.fallback_user:
self.checker.fallback_user = self.options.fallback_user
if self.options.fallback_group:
self.checker.fallback_group = self.options.fallback_group
def setup_checker(self):
bot = ReviewBot.CommandLineInterface.setup_checker(self)
if self.options.force:
bot.force = True
bot.do_comments = self.options.comment
if not self.options.openqa:
raise osc.oscerr.WrongArgs("missing openqa url")
bot.openqa = OpenQA_Client(server=self.options.openqa)
project, target, api = self._load_metadata()
bot.api_map = api
bot.tgt_repo = target
bot.project_settings = project
return bot
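
_load_metadata() expects the three JSON files from earlier in this diff under a data/ directory next to the script, or under whatever --data points to. A standalone sketch of that layout and of which bot attribute each file feeds (the path is a placeholder):

import os.path
try:
    import simplejson as json  # the bot prefers simplejson when available
except ImportError:
    import json

data_dir = "/path/to/openSUSE-release-tools"  # hypothetical --data value

def load(name):
    with open(os.path.join(data_dir, "data", name)) as f:
        return json.load(f)

target = load("repos.json")       # becomes bot.tgt_repo: test-repo definitions per openQA host
api = load("apimap.json")         # becomes bot.api_map: per-product issue/arch scheduling map
project = load("incidents.json")  # becomes bot.project_settings: base settings per product+arch

print(sorted(project.keys())[:3])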

605
oqamaint/openqabot.py Normal file

@ -0,0 +1,605 @@
# -*- coding: utf-8 -*-
from collections import namedtuple
from datetime import date
import md5
from pprint import pformat
import re
from urllib2 import HTTPError
import requests
import osc.core
import ReviewBot
from osclib.comments import CommentAPI
from suse import SUSEUpdate
try:
from xml.etree import cElementTree as ET
except ImportError:
from xml.etree import ElementTree as ET
try:
import simplejson as json
except ImportError:
import json
QA_UNKNOWN = 0
QA_INPROGRESS = 1
QA_FAILED = 2
QA_PASSED = 3
Package = namedtuple('Package', ('name', 'version', 'release'))
pkgname_re = re.compile(r'(?P<name>.+)-(?P<version>[^-]+)-(?P<release>[^-]+)\.(?P<arch>[^.]+)\.rpm')
comment_marker_re = re.compile(
r'<!-- openqa state=(?P<state>done|seen)(?: result=(?P<result>accepted|declined|none))?(?: revision=(?P<revision>\d+))? -->')
class OpenQABot(ReviewBot.ReviewBot):
""" check ABI of library packages
"""
def __init__(self, *args, **kwargs):
super(OpenQABot, self).__init__(*args, **kwargs)
self.tgt_repo = {}
self.project_settings = {}
self.api_map = {}
self.force = False
self.openqa = None
self.commentapi = CommentAPI(self.apiurl)
self.update_test_builds = {}
self.pending_target_repos = set()
self.openqa_jobs = {}
def gather_test_builds(self):
for prj, u in self.tgt_repo[self.openqa.baseurl].items():
buildnr = 0
cjob = 0
for j in self.jobs_for_target(u):
# avoid going backwards in job ID
if cjob > int(j['id']):
continue
buildnr = j['settings']['BUILD']
cjob = int(j['id'])
self.update_test_builds[prj] = buildnr
jobs = self.jobs_for_target(u, build=buildnr)
self.openqa_jobs[prj] = jobs
if self.calculate_qa_status(jobs) == QA_INPROGRESS:
self.pending_target_repos.add(prj)
# reimplementation of the base class method
def check_requests(self):
if self.ibs:
self.check_suse_incidents()
# first calculate the latest build number for current jobs
self.gather_test_builds()
started = []
# then check progress on running incidents
for req in self.requests:
jobs = self.request_get_openqa_jobs(req, incident=True, test_repo=True)
ret = self.calculate_qa_status(jobs)
if ret != QA_UNKNOWN:
started.append(req)
all_requests = self.requests
self.requests = started
self.logger.debug("check started requests")
super(OpenQABot, self).check_requests()
self.requests = all_requests
skipped_one = False
# now make sure the jobs are for current repo
for prj, u in self.tgt_repo[self.openqa.baseurl].items():
if prj in self.pending_target_repos:
skipped_one = True
continue
self.trigger_build_for_target(prj, u)
# do not schedule new incidents unless we finished
# last wave
if skipped_one:
return
self.logger.debug("Check all requests")
super(OpenQABot, self).check_requests()
# check a set of repos for their primary checksums
@staticmethod
def calculate_repo_hash(repos):
m = md5.new()
# bump this value to force a fresh REPOHASH (and thus a new build)
m.update('b')
for url in repos:
url += '/repodata/repomd.xml'
try:
root = ET.parse(osc.core.http_GET(url)).getroot()
except HTTPError:
raise
cs = root.find(
'.//{http://linux.duke.edu/metadata/repo}data[@type="primary"]/{http://linux.duke.edu/metadata/repo}checksum')
m.update(cs.text)
return m.hexdigest()
def is_incident_in_testing(self, incident):
# hardcoded for now as we only run this code for the SUSE Maintenance workflow
project = 'SUSE:Maintenance:{}'.format(incident)
xpath = "(state/@name='review') and (action/source/@project='{}' and action/@type='maintenance_release')".format(project)
res = osc.core.search(self.apiurl, request=xpath)['request']
# return the one and only (or None)
return res.find('request')
def calculate_incidents(self, incidents):
"""
get incident numbers from SUSE:Maintenance:Test project
returns dict with openQA var name : string with numbers
"""
self.logger.debug("calculate_incidents: {}".format(pformat(incidents)))
l_incidents = []
for kind, prj in incidents.items():
packages = osc.core.meta_get_packagelist(self.apiurl, prj)
incidents = []
# filter out incidents in staging
for incident in packages:
# remove patchinfo. prefix
incident = incident.replace('_', '.').split('.')[1]
req = self.is_incident_in_testing(incident)
# without release request it's in staging
if not req:
continue
# skip kgraft patches from aggregation
req_ = osc.core.Request()
req_.read(req)
src_prjs = {a.src_project for a in req_.actions}
if SUSEUpdate.kgraft_target(self.apiurl, src_prjs.pop()):
self.logger.debug("calculate_incidents: Incident is kgraft - {} ".format(incident))
continue
incidents.append(incident)
l_incidents.append((kind + '_TEST_ISSUES', ','.join(incidents)))
self.logger.debug("Calculate incidents:{}".format(pformat(l_incidents)))
return l_incidents
def jobs_for_target(self, data, build=None):
settings = data['settings'][0]
values = {
'distri': settings['DISTRI'],
'version': settings['VERSION'],
'arch': settings['ARCH'],
'flavor': settings['FLAVOR'],
'scope': 'relevant',
'latest': '1',
}
if build:
values['build'] = build
else:
values['test'] = data['test']
self.logger.debug("Get jobs: {}".format(pformat(values)))
return self.openqa.openqa_request('GET', 'jobs', values)['jobs']
# we don't know the current BUILD and querying all jobs is too expensive
# so we need to check for one known TEST first
# if that job doesn't contain the proper hash, we trigger a new one
# and then we know the build
def trigger_build_for_target(self, prj, data):
today = date.today().strftime("%Y%m%d")
try:
repohash = self.calculate_repo_hash(data['repos'])
except HTTPError as e:
self.logger.debug("REPOHAS not calculated with response {}".format(e))
return
buildnr = None
jobs = self.jobs_for_target(data)
for job in jobs:
if job['settings'].get('REPOHASH', '') == repohash:
# take the last in the row
buildnr = job['settings']['BUILD']
self.update_test_builds[prj] = buildnr
# ignore old build numbers, we want a fresh run every day
# to find regressions in the tests and to get data about
# randomly failing tests
if buildnr and buildnr.startswith(today):
return
buildnr = 0
# not found, then check for the next free build nr
for job in jobs:
build = job['settings']['BUILD']
if build and build.startswith(today):
try:
nr = int(build.split('-')[1])
if nr > buildnr:
buildnr = nr
except ValueError:
continue
buildnr = "{!s}-{:d}".format(today, buildnr + 1)
for s in data['settings']:
# now schedule it for real
if 'incidents' in data.keys():
for x, y in self.calculate_incidents(data['incidents']):
s[x] = y
s['BUILD'] = buildnr
s['REPOHASH'] = repohash
self.logger.debug("Prepared: {}".format(pformat(s)))
if not self.dryrun:
try:
self.logger.info("Openqa isos POST {}".format(pformat(s)))
self.openqa.openqa_request('POST', 'isos', data=s, retries=1)
except Exception as e:
self.logger.error(e)
self.update_test_builds[prj] = buildnr
def request_get_openqa_jobs(self, req, incident=True, test_repo=False):
ret = None
types = {a.type for a in req.actions}
if 'maintenance_release' in types:
src_prjs = {a.src_project for a in req.actions}
if len(src_prjs) != 1:
raise Exception("can't handle maintenance_release from different incidents")
build = src_prjs.pop()
tgt_prjs = {a.tgt_project for a in req.actions}
ret = []
if incident:
ret += self.openqa_jobs[build]
for prj in sorted(tgt_prjs):
repo_settings = self.tgt_repo.get(self.openqa.baseurl, {})
if test_repo and prj in repo_settings:
repo_jobs = self.openqa_jobs[prj]
ret += repo_jobs
return ret
def calculate_qa_status(self, jobs=None):
if not jobs:
return QA_UNKNOWN
j = {}
has_failed = False
in_progress = False
for job in jobs:
if job['clone_id']:
continue
name = job['name']
if name in j and int(job['id']) < int(j[name]['id']):
continue
j[name] = job
if job['state'] not in ('cancelled', 'done'):
in_progress = True
else:
if job['result'] != 'passed' and job['result'] != 'softfailed':
has_failed = True
if not j:
return QA_UNKNOWN
if in_progress:
return QA_INPROGRESS
if has_failed:
return QA_FAILED
return QA_PASSED
def add_comment(self, msg, state, request_id=None, result=None):
if not self.do_comments:
return
comment = "<!-- openqa state={!s}{!s} -->\n".format(state, ' result={!s}'.format(result) if result else '')
comment += "\n" + msg
info = self.find_obs_request_comment(request_id=request_id)
comment_id = info.get('id', None)
if state == info.get('state', 'missing'):
lines_before = len(info['comment'].split('\n'))
lines_after = len(comment.split('\n'))
if lines_before == lines_after:
self.logger.info("not worth the update, previous comment %s is state %s", comment_id, info['state'])
return
self.logger.info("adding comment to %s, state %s result %s", request_id, state, result)
self.logger.info("message: %s", msg)
if not self.dryrun:
if comment_id:
self.commentapi.delete(comment_id)
self.commentapi.add_comment(request_id=request_id, comment=str(comment))
# escape markdown
@staticmethod
def emd(str):
return str.replace('_', r'\_')
@staticmethod
def get_step_url(testurl, modulename):
failurl = testurl + '/modules/{!s}/fails'.format(modulename)
fails = requests.get(failurl).json()
failed_step = fails.get('first_failed_step', 1)
return "[{!s}]({!s}#step/{!s}/{:d})".format(OpenQABot.emd(modulename), testurl, modulename, failed_step)
@staticmethod
def job_test_name(job):
return "{!s}@{!s}".format(OpenQABot.emd(job['settings']['TEST']), OpenQABot.emd(job['settings']['MACHINE']))
def summarize_one_openqa_job(self, job):
testurl = osc.core.makeurl(self.openqa.baseurl, ['tests', str(job['id'])])
if not job['result'] in ['passed', 'failed', 'softfailed']:
rstring = job['result']
if rstring == 'none':
return None
return '\n- [{!s}]({!s}) is {!s}'.format(self.job_test_name(job), testurl, rstring)
modstrings = []
for module in job['modules']:
if module['result'] != 'failed':
continue
modstrings.append(self.get_step_url(testurl, module['name']))
if modstrings:
return '\n- [{!s}]({!s}) failed in {!s}'.format(self.job_test_name(job), testurl, ','.join(modstrings))
elif job['result'] == 'failed': # rare case: fail without module fails
return '\n- [{!s}]({!s}) failed'.format(self.job_test_name(job), testurl)
return ''
def summarize_openqa_jobs(self, jobs):
groups = {}
for job in jobs:
gl = "{!s}@{!s}".format(self.emd(job['group']), self.emd(job['settings']['FLAVOR']))
if gl not in groups:
groupurl = osc.core.makeurl(self.openqa.baseurl, ['tests', 'overview'],
{'version': job['settings']['VERSION'],
'groupid': job['group_id'],
'flavor': job['settings']['FLAVOR'],
'distri': job['settings']['DISTRI'],
'build': job['settings']['BUILD'],
})
groups[gl] = {'title': "__Group [{!s}]({!s})__\n".format(gl, groupurl),
'passed': 0, 'unfinished': 0, 'failed': []}
job_summary = self.summarize_one_openqa_job(job)
if job_summary is None:
groups[gl]['unfinished'] = groups[gl]['unfinished'] + 1
continue
# None vs ''
if not len(job_summary):
groups[gl]['passed'] = groups[gl]['passed'] + 1
continue
# if there is something to report, hold the request
# TODO: what is this ?
# qa_state = QA_FAILED
# gmsg = groups[gl]
groups[gl]['failed'].append(job_summary)
msg = ''
for group in sorted(groups.keys()):
msg += "\n\n" + groups[group]['title']
infos = []
if groups[group]['passed']:
infos.append("{:d} tests passed".format(groups[group]['passed']))
if len(groups[group]['failed']):
infos.append("{:d} tests failed".format(len(groups[group]['failed'])))
if groups[group]['unfinished']:
infos.append("{:d} unfinished tests".format(groups[group]['unfinished']))
msg += "(" + ', '.join(infos) + ")\n"
for fail in groups[group]['failed']:
msg += fail
return msg.rstrip('\n')
def check_one_request(self, req):
ret = None
try:
jobs = self.request_get_openqa_jobs(req)
qa_state = self.calculate_qa_status(jobs)
self.logger.debug("request %s state %s", req.reqid, qa_state)
msg = None
if self.force or qa_state == QA_UNKNOWN:
ret = super(OpenQABot, self).check_one_request(req)
jobs = self.request_get_openqa_jobs(req)
if self.force:
# make sure to delete previous comments if we're forcing
info = self.find_obs_request_comment(request_id=req.reqid)
if 'id' in info:
self.logger.debug("deleting old comment %s", info['id'])
if not self.dryrun:
self.commentapi.delete(info['id'])
if jobs:
# no notification until the result is done
osc.core.change_review_state(self.apiurl, req.reqid, newstate='new',
by_group=self.review_group, by_user=self.review_user,
message='now testing in openQA')
else:
msg = "no openQA tests defined"
self.add_comment(msg, 'done', request_id=req.reqid, result='accepted')
ret = True
elif qa_state == QA_FAILED or qa_state == QA_PASSED:
# don't take test repo results into the calculation of total
# this is for humans to decide which incident broke the test repo
jobs += self.request_get_openqa_jobs(req, incident=False, test_repo=True)
if self.calculate_qa_status(jobs) == QA_INPROGRESS:
self.logger.info(
"incident tests for request %s are done, but need to wait for test repo", req.reqid)
return
if qa_state == QA_PASSED:
msg = "openQA tests passed\n"
result = 'accepted'
ret = True
else:
msg = "openQA tests problematic\n"
result = 'declined'
ret = False
msg += self.summarize_openqa_jobs(jobs)
self.add_comment(msg, 'done', result=result, request_id=req.reqid)
elif qa_state == QA_INPROGRESS:
self.logger.info("request %s still in progress", req.reqid)
else:
raise Exception("unknown QA state %d", qa_state)
except Exception:
import traceback
self.logger.error("unhandled exception in openQA Bot")
self.logger.error(traceback.format_exc())
ret = None
return ret
def find_obs_request_comment(self, request_id=None, project_name=None):
"""Return previous comments (should be one)."""
if self.do_comments:
comments = self.commentapi.get_comments(request_id=request_id, project_name=project_name)
for c in comments.values():
m = comment_marker_re.match(c['comment'])
if m:
return {
'id': c['id'],
'state': m.group('state'),
'result': m.group('result'),
'comment': c['comment'],
'revision': m.group('revision')}
return {}
def check_product(self, job, product_prefix):
pmap = self.api_map[product_prefix]
posts = []
for arch in pmap['archs']:
need = False
settings = {'VERSION': pmap['version'], 'ARCH': arch}
settings['DISTRI'] = 'sle' if 'distri' not in pmap else pmap['distri']
issues = pmap.get('issues', {})
issues['OS_TEST_ISSUES'] = issues.get('OS_TEST_ISSUES', product_prefix)
required_issue = pmap.get('required_issue', False)
for key, prefix in issues.items():
self.logger.debug("{} {}".format(key, prefix))
if prefix + arch in job['channels']:
settings[key] = str(job['id'])
need = True
if required_issue:
if required_issue not in settings:
need = False
if need:
update = self.project_settings[product_prefix + arch]
update.apiurl = self.apiurl
update.logger = self.logger
for j in update.settings(
update.maintenance_project + ':' + str(job['id']),
product_prefix + arch, []):
if not job.get('openqa_build'):
job['openqa_build'] = update.get_max_revision(job)
if not job.get('openqa_build'):
return []
j['BUILD'] += '.' + str(job['openqa_build'])
j.update(settings)
# kGraft jobs can have different version
if 'real_version' in j:
j['VERSION'] = j['real_version']
del j['real_version']
posts.append(j)
self.logger.debug("Pmap: {} Posts: {}".format(pmap, posts))
return posts
def incident_openqa_jobs(self, s):
return self.openqa.openqa_request(
'GET', 'jobs',
{
'distri': s['DISTRI'],
'version': s['VERSION'],
'arch': s['ARCH'],
'flavor': s['FLAVOR'],
'build': s['BUILD'],
'scope': 'relevant',
'latest': '1'
})['jobs']
def check_suse_incidents(self):
for inc in requests.get('https://maintenance.suse.de/api/incident/active/').json():
self.logger.info("Incident number: {}".format(inc))
job = requests.get('https://maintenance.suse.de/api/incident/' + inc).json()
if job['meta']['state'] in ['final', 'gone']:
continue
# required in job: project, id, channels
self.test_job(job['base'])
def test_job(self, job):
self.logger.debug("Called test_job with: {}".format(job))
incident_project = str(job['project'])
try:
comment_info = self.find_obs_request_comment(project_name=incident_project)
except HTTPError as e:
self.logger.debug("Couldn't loaadd comments - {}".format(e))
return
comment_id = comment_info.get('id', None)
comment_build = str(comment_info.get('revision', ''))
openqa_posts = []
for prod in self.api_map.keys():
self.logger.debug("{} -- product in apimap".format(prod))
openqa_posts += self.check_product(job, prod)
openqa_jobs = []
for s in openqa_posts:
jobs = self.incident_openqa_jobs(s)
# take the project comment as marker for not posting jobs
if not len(jobs) and comment_build != str(job['openqa_build']):
if self.dryrun:
self.logger.info('WOULD POST:{}'.format(pformat(json.dumps(s, sort_keys=True))))
else:
self.logger.info("Posted: {}".format(pformat(json.dumps(s, sort_keys=True))))
self.openqa.openqa_request('POST', 'isos', data=s, retries=1)
openqa_jobs += self.incident_openqa_jobs(s)
else:
self.logger.info("{} got {}".format(pformat(s), len(jobs)))
openqa_jobs += jobs
self.openqa_jobs[incident_project] = openqa_jobs
if len(openqa_jobs) == 0:
self.logger.debug("No openqa jobs defined")
return
# print openqa_jobs
msg = self.summarize_openqa_jobs(openqa_jobs)
state = 'seen'
result = 'none'
qa_status = self.calculate_qa_status(openqa_jobs)
if qa_status == QA_PASSED:
result = 'accepted'
state = 'done'
if qa_status == QA_FAILED:
result = 'declined'
state = 'done'
comment = "<!-- openqa state={!s} result={!s} revision={!s} -->\n".format(
state, result, job.get('openqa_build'))
comment += msg
if comment_id and state != 'done':
self.logger.info("%s is already commented, wait until done", incident_project)
return
if comment_info.get('comment', '').rstrip('\n') == comment.rstrip('\n'):
self.logger.info("%s comment did not change", incident_project)
return
self.logger.info("adding comment to %s, state %s", incident_project, state)
if not self.dryrun:
if comment_id:
self.logger.debug("delete comment: {}".format(comment_id))
self.commentapi.delete(comment_id)
self.commentapi.add_comment(project_name=str(incident_project), comment=str(comment))
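
The bot tracks its own state through the HTML marker parsed by comment_marker_re above; every comment it posts starts with one. A tiny round-trip check of that format (the values are made up):

import re

comment_marker_re = re.compile(
    r'<!-- openqa state=(?P<state>done|seen)'
    r'(?: result=(?P<result>accepted|declined|none))?'
    r'(?: revision=(?P<revision>\d+))? -->')

comment = "<!-- openqa state=done result=accepted revision=7 -->\nopenQA tests passed\n"
m = comment_marker_re.match(comment)
print("{} {} {}".format(m.group("state"), m.group("result"), m.group("revision")))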

34
oqamaint/opensuse.py Normal file

@ -0,0 +1,34 @@
# -*- coding: utf-8 -*-
from update import Update
class openSUSEUpdate(Update):
repo_prefix = 'http://download.opensuse.org/repositories'
maintenance_project = 'openSUSE:Maintenance'
def settings(self, src_prj, dst_prj, packages):
settings = super(openSUSEUpdate, self).settings(src_prj, dst_prj, packages)
settings = settings[0]
# openSUSE:Maintenance key
settings['IMPORT_GPG_KEYS'] = 'gpg-pubkey-b3fd7e48-5549fd0f'
settings['ZYPPER_ADD_REPO_PREFIX'] = 'incident'
if packages:
# XXX: this may fail in various ways
# - conflicts between subpackages
# - added packages
# - conflicts with installed packages (e.g sendmail vs postfix)
settings['INSTALL_PACKAGES'] = ' '.join(set([p.name for p in packages]))
settings['VERIFY_PACKAGE_VERSIONS'] = ' '.join(
['{} {}-{}'.format(p.name, p.version, p.release) for p in packages])
settings['ZYPPER_ADD_REPOS'] = settings['INCIDENT_REPO']
settings['ADDONURL'] = settings['INCIDENT_REPO']
settings['WITH_MAIN_REPO'] = 1
settings['WITH_UPDATE_REPO'] = 1
return [settings]
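
For openSUSE updates the interesting part is how the package list is folded into openQA variables; a standalone sketch of just that step (Package mirrors the namedtuple defined in oqamaint/openqabot.py, the packages themselves are invented):

from collections import namedtuple

Package = namedtuple("Package", ("name", "version", "release"))

packages = [
    Package("vim", "8.0.1568", "lp150.2.3"),
    Package("vim-data", "8.0.1568", "lp150.2.3"),
]

settings = {}
settings["INSTALL_PACKAGES"] = " ".join(set(p.name for p in packages))
settings["VERIFY_PACKAGE_VERSIONS"] = " ".join(
    "{} {}-{}".format(p.name, p.version, p.release) for p in packages)
print(settings)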

1
oqamaint/osclib Symbolic link

@ -0,0 +1 @@
../osclib

93
oqamaint/suse.py Normal file

@ -0,0 +1,93 @@
# -*- coding: utf-8 -*-
import re
import requests
import osc.core
from update import Update
MINIMALS = {
x.rstrip()
for x in requests.get(
'https://gitlab.suse.de/qa-maintenance/metadata/raw/master/packages-to-be-tested-on-minimal-systems').iter_lines()
if len(x) > 0 and not(x.startswith("#") or x.startswith(' '))}
class SUSEUpdate(Update):
repo_prefix = 'http://download.suse.de/ibs'
maintenance_project = 'SUSE:Maintenance'
def __init__(self, settings):
super(SUSEUpdate, self).__init__(settings)
self.opensuse = False
# we take requests that have a kgraft-patch package as kgraft patch (surprise!)
@staticmethod
def kgraft_target(apiurl, prj):
target = None
skip = False
pattern = re.compile(r"kgraft-patch-([^.]+)\.")
for package in osc.core.meta_get_packagelist(apiurl, prj):
if package.startswith("kernel-"):
skip = True
break
match = re.match(pattern, package)
if match:
target = match.group(1)
if skip:
return None
return target
@staticmethod
def parse_kgraft_version(kgraft_target):
return kgraft_target.lstrip('SLE').split('_')[0]
def add_minimal_settings(self, prj, settings):
minimal = False
for pkg in self.incident_packages(prj):
if pkg in MINIMALS:
minimal = True
if not minimal:
return []
settings = settings.copy()
settings['FLAVOR'] += '-Minimal'
return [settings]
@staticmethod
def add_kernel_settings(settings):
settings = settings.copy()
if settings['BUILD'].split(":")[-1].startswith('kernel-') and settings['FLAVOR'] == 'Server-DVD-Incidents':
settings['FLAVOR'] += '-Kernel'
return [settings]
return []
def settings(self, src_prj, dst_prj, packages):
settings = super(SUSEUpdate, self).settings(src_prj, dst_prj, packages)
# kGraft handling - fully supported kGraft lives in its own space, but LTSS lives in the standard LTSS channel
for x in range(len(settings)):
if settings[x]['FLAVOR'] == 'Server-DVD-Incidents' and settings[x]['BUILD'].split(
':')[-1].startswith('kgraft-patch'):
settings[x]['FLAVOR'] = 'Server-DVD-Incidents-Kernel'
self.logger.warning("kGraft started from INCIDENTS !!")
if settings[x]['FLAVOR'] == 'Server-DVD-Incidents-Kernel' and not settings[x]['BUILD'].split(
':')[-1].startswith('kgraft-patch'):
del settings[x]
continue
if settings[x]['FLAVOR'] == 'Server-DVD-Incidents-Kernel':
settings[x]['KGRAFT'] = "1"
if settings[x]['VERSION'] == '12':
settings[x]['real_version'] = self.parse_kgraft_version(self.kgraft_target(self.apiurl, src_prj))
if not len(settings):
return []
settings += self.add_minimal_settings(src_prj, settings[0])
settings += self.add_kernel_settings(settings[0])
self.logger.debug("settings are: {}".format(settings))
return settings
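
The kGraft handling hinges on two small string transformations: kgraft_target() pulls the target out of the kgraft-patch package name, and parse_kgraft_version() reduces that to the service pack. A standalone check with a package name of the expected shape (the name itself is made up):

import re

pattern = re.compile(r"kgraft-patch-([^.]+)\.")

package = "kgraft-patch-SLE12-SP2_Update_5.incident"  # hypothetical package name
target = pattern.match(package).group(1)              # -> 'SLE12-SP2_Update_5'

# parse_kgraft_version(): drop the SLE prefix, keep everything before the first '_'
version = target.lstrip("SLE").split("_")[0]          # -> '12-SP2'
print("{} {}".format(target, version))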

128
oqamaint/update.py Normal file

@ -0,0 +1,128 @@
# -*- coding: utf-8 -*-
import re
import requests
# python 3 has gzip decompress function
try:
from gzip import decompress
except ImportError:
from gzip import GzipFile
import io
def decompress(data):
with GzipFile(fileobj=io.BytesIO(data)) as f:
return f.read()
# use cElementTree by default, fallback to pure python
try:
from xml.etree import cElementTree as ET
except ImportError:
from xml.etree import ElementTree as ET
import osc.core
from osclib.memoize import memoize
class Update(object):
incident_name_cache = {}
def __init__(self, settings):
self._settings = settings
self._settings['_NOOBSOLETEBUILD'] = '1'
self.opensuse = True
def get_max_revision(self, job):
repo = self.repo_prefix + '/'
repo += self.maintenance_project.replace(':', ':/')
repo += ':/{!s}'.format(job['id'])
max_revision = 0
for channel in job['channels']:
crepo = repo + '/' + channel.replace(':', '_')
xml = requests.get(crepo + '/repodata/repomd.xml')
if not xml.ok:
self.logger.info("{} skipped .. need wait".format(crepo))
# if one fails, we skip it and wait
return None
root = ET.fromstring(xml.text)
rev = root.find('.//{http://linux.duke.edu/metadata/repo}revision')
rev = int(rev.text)
if rev > max_revision:
max_revision = rev
return max_revision
def settings(self, src_prj, dst_prj, packages):
s = self._settings.copy()
# start with a colon so it looks cool behind 'Build' :/
s['BUILD'] = ':' + src_prj.split(':')[-1]
name = self.incident_name(src_prj)
repo = dst_prj.replace(':', '_')
repo = '{!s}/{!s}/{!s}/'.format(self.repo_prefix, src_prj.replace(':', ':/'), repo)
patch_id = self.patch_id(repo)
if patch_id:
s['INCIDENT_REPO'] = repo
s['INCIDENT_PATCH'] = patch_id
s['BUILD'] += ':' + name
return [s]
@memoize()
def incident_packages(self, prj):
packages = []
for package in osc.core.meta_get_packagelist(self.apiurl, prj):
if package.endswith('SUSE_Channels') or package.startswith('patchinfo'):
continue
parts = package.split('.')
# remove target name
parts.pop()
packages.append('.'.join(parts))
return packages
# grab the updateinfo from the given repo and return its patch's id
@staticmethod
def patch_id(repo):
url = repo + 'repodata/repomd.xml'
repomd = requests.get(url)
if not repomd.ok:
return None
root = ET.fromstring(repomd.text)
cs = root.find(
'.//{http://linux.duke.edu/metadata/repo}data[@type="updateinfo"]/{http://linux.duke.edu/metadata/repo}location')
try:
url = repo + cs.attrib['href']
except AttributeError:
return None
repomd = requests.get(url).content
root = ET.fromstring(decompress(repomd))
return root.find('.//id').text
# take the shortest package name we find - often enough correct
def incident_name(self, prj):
if prj not in self.incident_name_cache:
self.incident_name_cache[prj] = self._incident_name(prj)
return self.incident_name_cache[prj]
def _incident_name(self, prj):
shortest_pkg = None
for package in osc.core.meta_get_packagelist(self.apiurl, prj):
if package.startswith('patchinfo'):
continue
if package.endswith('SUSE_Channels'):
continue
# other tools on SLE get their data from SMELT without access to this attribute
if self.opensuse:
url = osc.core.makeurl(self.apiurl, ('source', prj, package, '_link'))
root = ET.parse(osc.core.http_GET(url)).getroot()
if root.attrib.get('cicount'):
continue
if not shortest_pkg or len(package) < len(shortest_pkg):
shortest_pkg = package
if not shortest_pkg:
shortest_pkg = 'unknown'
match = re.match(r'^(.*)\.[^\.]*$', shortest_pkg)
return match.group(1) if match else shortest_pkg
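
settings() encodes the incident number and a representative (shortest) package name into BUILD, and check_product() in oqamaint/openqabot.py later appends the repository revision. A quick sketch of that naming with invented values:

def build_id(src_prj, incident_name, openqa_build=None):
    # mirrors Update.settings(): ':<incident number>:<shortest package name>'
    build = ":" + src_prj.split(":")[-1] + ":" + incident_name
    if openqa_build is not None:
        build += "." + str(openqa_build)  # appended later by check_product()
    return build

print(build_id("SUSE:Maintenance:7318", "util-linux", 3))  # -> ':7318:util-linux.3'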