#!/usr/bin/python3
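"""Bot to sync openQA status to OBS.

Listens on the OBS/openQA message bus (RabbitMQ) for repository publishes
and openQA job events, maps published ISO/image names onto staging projects
via the OSRT:OpenQAMapping attribute, and posts the aggregated openQA
results as check reports on the published repositories.
"""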

import argparse
import json
import logging
import re
from urllib.error import HTTPError
from urllib.parse import quote_plus

import requests
from lxml import etree as ET
from openqa_client.client import OpenQA_Client
from packaging import version

import osc
from osc.core import http_GET, http_POST, makeurl
from osclib.conf import Config
from osclib.PubSubConsumer import PubSubConsumer
from osclib.stagingapi import StagingAPI


class Project(object):
    def __init__(self, name):
        self.name = name
        # Relies on the module-level apiurl set up in __main__ below
        Config(apiurl, name)
        self.api = StagingAPI(apiurl, name)
        self.staging_projects = dict()
        self.listener = None
        self.logger = logging.getLogger(__name__)
        self.replace_string = self.api.attribute_value_load('OpenQAMapping')

    def init(self):
        projects = set()
        for project in self.api.get_staging_projects():
            if self.api.is_adi_project(project):
                continue
            self.staging_projects[project] = self.initial_staging_state(project)
            projects.add(project)
        return projects

    def staging_letter(self, name):
        return name.split(':')[-1]

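    # The OpenQAMapping attribute holds a sed-like s/old/new/ expression that
    # turns a published ISO name into the name openQA uses for the staging.
    # A hypothetical example value (not taken from the source):
    #   s/(.*)-Media\.iso/\1-Staging:$LETTER-Media.iso/
    # $LETTER is substituted with the staging project's letter suffix.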
    def map_iso(self, staging_project, iso):
        parts = self.replace_string.split('/')
        if parts[0] != 's':
            raise Exception("{}'s iso_replace_string does not start with s/".format(self.name))
        old = parts[1]
        new = parts[2]
        new = new.replace('$LETTER', self.staging_letter(staging_project))
        try:
            stagingiso = re.compile(old).sub(new, iso)
        except re.error:
            self.logger.error(f"_MAP_ISO {self.replace_string} does not create valid regexps in {self.name}")
            return None

        if stagingiso == iso:
            self.logger.info(f"{self.replace_string} did not map {iso} properly, ignoring")
            return None

        return stagingiso

    def gather_isos(self, name, repository):
        ret = []

        # Fetch /published/prj/repo/iso/*.iso
        url = self.api.makeurl(['published', name, repository, 'iso'])
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        for entry in root.findall('entry'):
            if entry.get('name').endswith('.iso'):
                ret.append(self.map_iso(name, entry.get('name')))

        # Fetch /published/prj/repo/*.qcow2 and *.raw.xz
        url = self.api.makeurl(['published', name, repository])
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        for entry in root.findall('entry'):
            filename = entry.get('name')
            if filename.endswith('.qcow2') or filename.endswith('.raw.xz'):
                ret.append(self.map_iso(name, filename))

        # Filter out isos which couldn't be mapped
        ret = [iso for iso in ret if iso]

        return ret

    def gather_buildid(self, name, repository):
        url = self.api.makeurl(['published', name, repository], {'view': 'status'})
        f = self.api.retried_GET(url)
        id = ET.parse(f).getroot().find('buildid')
        if id is not None:
            return id.text

    def initial_staging_state(self, name):
        return {'isos': self.gather_isos(name, 'images'),
                'id': self.gather_buildid(name, 'images')}

    def fetch_openqa_jobs(self, staging, iso, openqa_infos):
        openqa = self.listener.jobs_for_iso(iso)
        # collect job infos to pick names
        for job in openqa:
            print(staging, iso, job['id'], job['state'], job['result'],
                  job['settings']['FLAVOR'], job['settings']['TEST'], job['settings']['MACHINE'])
            openqa_infos[job['id']] = {'url': self.listener.test_url(job)}
            openqa_infos[job['id']]['state'] = self.map_openqa_result(job)
            openqa_infos[job['id']]['build'] = job['settings']['BUILD']
            openqa_infos[job['id']]['name'] = f"{job['settings']['FLAVOR']}-{job['settings']['TEST']}@{job['settings']['MACHINE']}"

    @staticmethod
    def compare_simple_builds(build1, build2):
        """Simple build number comparison"""
        ver1 = version.parse(build1)
        ver2 = version.parse(build2)
        if ver1 < ver2:
            return -1
        if ver1 > ver2:
            return 1
        return 0

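    # Illustrative semantics of the two comparators (derived from the code):
    #   compare_simple_builds('20.4', '20.10') -> -1  (version-aware, not lexicographic)
    #   compare_composite_builds('1.5_2.1', '1.5_2.2') -> -1
    #   compare_composite_builds('1.5_2.1', '1.6_2.0') -> raises (components disagree)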
    @staticmethod
    def compare_composite_builds(build1, build2):
        """Compare BUILD numbers consisting of multiple _-separated components."""
        components1 = build1.split('_')
        components2 = build2.split('_')
        if len(components1) != len(components2):
            raise Exception(f'Failed to compare {build1} and {build2}: Different format')

        component_cmps = [Project.compare_simple_builds(components1[i], components2[i]) for i in range(0, len(components1))]
        less = -1 in component_cmps
        greater = 1 in component_cmps
        if less and greater:
            raise Exception(f'Failed to compare {build1} and {build2}: Not ordered')
        if less:
            return -1
        if greater:
            return 1
        return 0

    def update_staging_status(self, staging):
        openqa_infos = dict()
        for iso in self.staging_projects[staging]['isos']:
            self.fetch_openqa_jobs(staging, iso, openqa_infos)

        buildid = self.staging_projects[staging].get('id')
        if not buildid:
            self.logger.info("I don't know the build id of " + staging)
            return
        # all openQA jobs are created at the same URL
        url = self.api.makeurl(['status_reports', 'published', staging, 'images', 'reports', buildid])

        # make sure the names are unique
        obsolete_jobs = []
        taken_names = dict()
        for id in openqa_infos:
            name = openqa_infos[id]['name']
            if name in taken_names:
                # There are multiple jobs with that specific FLAVOR-TEST@MACHINE.
                # In SLE Micro, jobs currently use BUILD=(dvdbuild)_(image_build),
                # so if the dvd is rebuilt, new image jobs are triggered for the
                # same binary. The openQA ?latest=1 filter doesn't look at that,
                # so we have to figure out which of those is the most recent one.
                build1 = openqa_infos[taken_names[name]]['build']
                build2 = openqa_infos[id]['build']
                if '_' in build1 and '_' in build2 and build1 != build2:
                    # Use the more recent build
                    buildcmp = Project.compare_composite_builds(build1, build2)
                    self.logger.info(f'Multiple builds for {name}, {build1} and {build2}. Comparison: {buildcmp}')
                    if buildcmp < 0:  # Drop the previous one
                        obsolete_jobs.append(taken_names[name])
                        taken_names[name] = id
                        continue
                    elif buildcmp > 0:  # Drop this one
                        obsolete_jobs.append(id)
                        continue

                raise Exception(f'Names of job #{id} and #{taken_names[name]} collide: {name}')
            taken_names[name] = id

        for id in obsolete_jobs:
            del openqa_infos[id]

        for info in openqa_infos.values():
            xml = self.openqa_check_xml(info['url'], info['state'], 'openqa:' + info['name'])
            try:
                if self.listener.dryrun:
                    print(f"Would POST to {url}: {xml}")
                else:
                    http_POST(url, data=xml)
            except HTTPError:
                self.logger.error('failed to post status to ' + url)

    def update_staging_buildid(self, project, repository, buildid):
        self.staging_projects[project]['id'] = buildid
        self.staging_projects[project]['isos'] = self.gather_isos(project, repository)
        self.update_staging_status(project)

    def check_published_repo(self, project, repository, buildid):
        if repository != 'images':
            return
        if project in self.staging_projects:
            self.update_staging_buildid(project, repository, buildid)

    def matching_project(self, iso):
        for p in self.staging_projects:
            if iso in self.staging_projects[p]['isos']:
                return p

    def map_openqa_result(self, job):
        if job['result'] in ['passed', 'softfailed']:
            return 'success'
        if job['result'] == 'none':
            return 'pending'
        return 'failure'

    def openqa_job_change(self, iso):
        staging = self.matching_project(iso)
        if not staging:
            return
        # we fetch all openqa jobs so we can avoid long job names
        self.update_staging_status(staging)

    def openqa_check_xml(self, url, state, name):
        check = ET.Element('check')
        se = ET.SubElement(check, 'url')
        se.text = url
        se = ET.SubElement(check, 'state')
        se.text = state
        se = ET.SubElement(check, 'name')
        se.text = name
        return ET.tostring(check)
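
    # Illustrative output (argument values made up):
    #   openqa_check_xml('https://openqa.opensuse.org/tests/123', 'success',
    #                    'openqa:dvd-gnome@x86_64')
    #   -> b'<check><url>https://openqa.opensuse.org/tests/123</url>'
    #      b'<state>success</state><name>openqa:dvd-gnome@x86_64</name></check>'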


class Listener(PubSubConsumer):
    def __init__(self, amqp_prefix, openqa_url, dryrun):
        super(Listener, self).__init__(amqp_prefix, logging.getLogger(__name__))
        self.projects = []
        self.amqp_prefix = amqp_prefix
        self.openqa_url = openqa_url
        self.dryrun = dryrun
        self.openqa = OpenQA_Client(server=openqa_url)
        self.projects_to_check = set()

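    # With the 'opensuse' prefix, routing_keys() yields e.g.
    #   opensuse.obs.repo.published, opensuse.openqa.job.done,
    #   opensuse.openqa.job.create, opensuse.openqa.job.restart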
    def routing_keys(self):
        ret = []
        for suffix in ['.obs.repo.published', '.openqa.job.done',
                       '.openqa.job.create', '.openqa.job.restart']:
            ret.append(self.amqp_prefix + suffix)
        return ret

    def add(self, project):
        project.listener = self
        self.projects.append(project)

    def start_consuming(self):
        # now we are (re-)connected to the bus and need to fetch the
        # initial state
        self.projects_to_check = set()
        for project in self.projects:
            self.logger.info('Fetching ISOs of %s', project.name)
            for sproj in project.init():
                self.projects_to_check.add((project, sproj))
        self.logger.info('Finished fetching initial ISOs, listening')
        super(Listener, self).start_consuming()

    def interval(self):
        # poll more often while there are stagings left to (re-)check
        if len(self.projects_to_check):
            return 5
        return super(Listener, self).interval()

    def check_some_projects(self):
        count = 0
        limit = 5
        while len(self.projects_to_check):
            project, staging = self.projects_to_check.pop()
            project.update_staging_status(staging)
            count += 1
            if count >= limit:
                return

    def still_alive(self):
        self.check_some_projects()
        super(Listener, self).still_alive()

    def is_production_job(self, job):
        # A '/' in BUILD marks a PR verification run (a missing BUILD counts
        # as non-production too); jobs in development groups are skipped as well.
        if '/' in job['settings'].get('BUILD', '/') or \
                'Development' in job['group']:
            return False

        return True

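    # openqa_client turns the request below into a query against the openQA
    # jobs API (roughly GET /api/v1/jobs?iso=...&scope=current&latest=1).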
    def jobs_for_iso(self, iso):
        # Try ISO= matching first
        values = {
            'iso': iso,
            'scope': 'current',
            'latest': '1',
        }
        jobs = self.openqa.openqa_request('GET', 'jobs', values)['jobs']

        # If no matches, try HDD_1=
        if len(jobs) == 0:
            del values['iso']
            values['hdd_1'] = iso
            jobs = self.openqa.openqa_request('GET', 'jobs', values)['jobs']

        # Ignore PR verification runs (and jobs without 'BUILD')
        return [job for job in jobs if self.is_production_job(job)]

    def get_step_url(self, testurl, modulename):
        failurl = testurl + '/modules/{!s}/fails'.format(quote_plus(modulename))
        fails = requests.get(failurl).json()
        failed_step = fails.get('first_failed_step', 1)
        return "{!s}#step/{!s}/{:d}".format(testurl, modulename, failed_step)

    def test_url(self, job):
        url = self.openqa_url + ("/tests/%d" % job['id'])
        if job['result'] == 'failed':
            # link directly to the first failing module
            for module in job['modules']:
                if module['result'] == 'failed':
                    return self.get_step_url(url, module['name'])
        return url

    def on_published_repo(self, payload):
        for p in self.projects:
            p.check_published_repo(str(payload['project']), str(payload['repo']), str(payload['buildid']))

    def on_openqa_job(self, iso):
        self.logger.debug('openqa_job_change %s', iso)
        for p in self.projects:
            p.openqa_job_change(iso)

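    # Example (hypothetical values) body of a <prefix>.obs.repo.published message:
    #   {"project": "openSUSE:Factory:Staging:A", "repo": "images", "buildid": "42.3"}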
    def on_message(self, unused_channel, method, properties, body):
        self.acknowledge_message(method.delivery_tag)
        if method.routing_key == '{}.obs.repo.published'.format(self.amqp_prefix):
            self.on_published_repo(json.loads(body))
        elif re.search(r'\.openqa\.', method.routing_key):
            data = json.loads(body)
            if '/' in data.get('BUILD', '/'):
                return  # Ignore PR verification runs (and events without BUILD)
            if data.get('ISO'):
                self.on_openqa_job(data.get('ISO'))
            elif data.get('HDD_1'):
                self.on_openqa_job(data.get('HDD_1'))
        else:
            self.logger.warning("unknown rabbitmq message {}".format(method.routing_key))


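# Typical invocation (script name and URL are illustrative):
#   ./rabbit-openqa.py -A https://api.opensuse.org --dry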
if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description='Bot to sync openQA status to OBS')
    parser.add_argument("--apiurl", '-A', type=str, help='API URL of OBS')
    parser.add_argument('-d', '--debug', action='store_true', default=False,
                        help='enable debug information')
    parser.add_argument('--dry', action='store_true', default=False,
                        help='do not perform changes')

    args = parser.parse_args()

    osc.conf.get_config(override_apiurl=args.apiurl)
    osc.conf.config['debug'] = args.debug

    apiurl = osc.conf.config['apiurl']

    if apiurl.endswith('suse.de'):
        amqp_prefix = 'suse'
        openqa_url = 'https://openqa.suse.de'
    else:
        amqp_prefix = 'opensuse'
        openqa_url = 'https://openqa.opensuse.org'

    logging.basicConfig(level=logging.INFO)

    listener = Listener(amqp_prefix, openqa_url, dryrun=args.dry)
    # watch every project that carries an OSRT:OpenQAMapping attribute
    url = makeurl(apiurl, ['search', 'project', 'id'], {'match': 'attribute/@name="OSRT:OpenQAMapping"'})
    f = http_GET(url)
    root = ET.parse(f).getroot()
    for entry in root.findall('project'):
        listener.add(Project(entry.get('name')))

    try:
        listener.run(runtime=10800)
    except KeyboardInterrupt:
        listener.stop()