openSUSE-release-tools/totest-manager.py

1220 lines
44 KiB
Python
Raw Normal View History

#!/usr/bin/python2
# -*- coding: utf-8 -*-
#
# (C) 2014 mhrusecky@suse.cz, openSUSE.org
# (C) 2014 tchvatal@suse.cz, openSUSE.org
# (C) 2014 aplanas@suse.de, openSUSE.org
# (C) 2014 coolo@suse.de, openSUSE.org
# (C) 2017 okurz@suse.de, openSUSE.org
2018-05-15 10:09:32 +02:00
# (C) 2018 dheidler@suse.de, openSUSE.org
# Distribute under GPLv2 or GPLv3
from __future__ import print_function
2015-04-29 13:32:04 +02:00
import datetime
import json
2018-11-26 10:14:17 +01:00
import logging
import os
import re
2015-04-20 15:00:02 +02:00
import signal
2018-11-26 10:14:17 +01:00
import sys
import time
from xml.etree import cElementTree as ET
2018-11-26 10:14:17 +01:00
import cmdln
from openqa_client.client import OpenQA_Client
import osc
2018-11-26 10:14:17 +01:00
from osc.core import makeurl
2015-02-20 13:18:09 +01:00
from osclib.conf import Config
from osclib.stagingapi import StagingAPI
2018-11-26 10:14:17 +01:00
import pika
import yaml
try:
from urllib.error import HTTPError
except ImportError:
# python 2.x
from urllib2 import HTTPError
2017-10-10 08:26:21 +02:00
# Module-level logger; handlers and level are configured by the CLI entry point.
logger = logging.getLogger()
# QA result states as returned by ToTestBase.overall_result()
QA_INPROGRESS = 1
QA_FAILED = 2
QA_PASSED = 3
2017-10-10 08:26:21 +02:00
class NotFoundException(Exception):
    """Raised when an expected build artefact or version string cannot be located."""
class ImageProduct(object):
    """Value object pairing an image package name with the architectures it is built for."""

    def __init__(self, package, archs):
        self.package = package  # OBS package name, possibly with a :flavor suffix
        self.archs = archs      # list of architecture strings
class ToTestBase(object):
    """Base class to store the basic interface.

    Drives the openSUSE ToTest workflow: check that a project is
    snapshotable, release it into the <project>:ToTest subproject, watch the
    openQA results for the snapshot and publish it once testing passed.
    """

    # repository/arch where product (image) binaries are built
    product_repo = 'images'
    product_arch = 'local'
    livecd_repo = 'images'
    # containers are released into a separate repo in :ToTest
    totest_container_repo = 'containers'

    # product lists; subclasses override these per distribution/arch
    main_products = []
    ftp_products = []
    container_products = []
    livecd_products = []
    image_products = []

    def __init__(self, project, dryrun=False, norelease=False, api_url=None,
                 openqa_server='https://openqa.opensuse.org', test_subproject=None):
        self.project = project
        self.dryrun = dryrun
        self.norelease = norelease
        if not api_url:
            api_url = osc.conf.config['apiurl']
        self.api = StagingAPI(api_url, project=project)
        self.openqa_server = openqa_server
        if not test_subproject:
            test_subproject = 'ToTest'
        self.test_project = '%s:%s' % (self.project, test_subproject)
        self.openqa = OpenQA_Client(server=openqa_server)
        self.load_issues_to_ignore()
        self.project_base = project.split(':')[0]
        self.update_pinned_descr = False
        self.amqp_url = osc.conf.config.get('ttm_amqp_url')

    def load_issues_to_ignore(self):
        """Load the per-project 'IgnoredIssues' attribute into self.issues_to_ignore."""
        text = self.api.attribute_value_load('IgnoredIssues')
        if text:
            # safe_load: the attribute is plain data; no reason to allow
            # arbitrary object construction (yaml.load without a Loader is
            # deprecated and unsafe)
            root = yaml.safe_load(text)
            self.issues_to_ignore = root.get('last_seen')
        else:
            self.issues_to_ignore = dict()

    def save_issues_to_ignore(self):
        """Write self.issues_to_ignore back to the 'IgnoredIssues' attribute."""
        if self.dryrun:
            return
        text = yaml.dump({'last_seen': self.issues_to_ignore}, default_flow_style=False)
        self.api.attribute_value_save('IgnoredIssues', text)

    def openqa_group(self):
        """openQA job group name; defaults to the project name."""
        return self.project

    def iso_prefix(self):
        """Prefix used in ISO file names; defaults to the project name."""
        return self.project

    def jobs_num(self):
        """Minimum number of openQA jobs expected for a fully scheduled snapshot."""
        return 70

    def current_version(self):
        return self.release_version()

    def binaries_of_product(self, project, product, repo=None, arch=None):
        """Return the list of binary file names built for product in project."""
        if repo is None:
            repo = self.product_repo
        if arch is None:
            arch = self.product_arch
        url = self.api.makeurl(['build', project, repo, arch, product])
        try:
            f = self.api.retried_GET(url)
        except HTTPError:
            return []
        ret = []
        root = ET.parse(f).getroot()
        for binary in root.findall('binary'):
            ret.append(binary.get('filename'))
        return ret

    def get_current_snapshot(self):
        """Return the current snapshot in the test project"""
        for binary in self.binaries_of_product(self.test_project, '000product:%s-cd-mini-%s' % (self.project_base, self.arch())):
            result = re.match(r'%s-%s-NET-.*-Snapshot(.*)-Media.iso' % (self.project_base, self.iso_prefix()),
                              binary)
            if result:
                return result.group(1)

        return None

    def ftp_build_version(self, project, tree, base=None):
        """Extract the build number from the ftp tree's Media1.report file name."""
        if not base:
            base = self.project_base
        for binary in self.binaries_of_product(project, tree):
            result = re.match(r'%s.*Build(.*)-Media1.report' % base, binary)
            if result:
                return result.group(1)
        raise NotFoundException("can't find %s ftp version" % project)

    def iso_build_version(self, project, tree, base=None, repo=None, arch=None):
        """Extract the build/snapshot number from an ISO or docker tarball name."""
        if not base:
            base = self.project_base
        for binary in self.binaries_of_product(project, tree, repo=repo, arch=arch):
            result = re.match(r'.*-(?:Build|Snapshot)([0-9.]+)(?:-Media.*\.iso|\.docker\.tar\.xz)', binary)
            if result:
                return result.group(1)
        raise NotFoundException("can't find %s iso version" % project)

    def release_version(self):
        """Version of the release package in the standard repository."""
        url = self.api.makeurl(['build', self.project, 'standard', self.arch(),
                                '000release-packages:%s-release' % self.project_base])
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        for binary in root.findall('binary'):
            binary = binary.get('filename', '')
            result = re.match(r'.*-([^-]*)-[^-]*.src.rpm', binary)
            if result:
                return result.group(1)
        raise NotFoundException("can't find %s version" % self.project)

    def current_qa_version(self):
        """Snapshot version currently under test, as recorded on the dashboard."""
        return self.api.pseudometa_file_load('version_totest')

    def find_openqa_results(self, snapshot):
        """Return the openqa jobs of a given snapshot and filter out the
        cloned jobs
        """
        url = makeurl(self.openqa_server,
                      ['api', 'v1', 'jobs'], {'group': self.openqa_group(), 'build': snapshot, 'latest': 1})
        f = self.api.retried_GET(url)
        jobs = []
        for job in json.load(f)['jobs']:
            if job['clone_id'] or job['result'] == 'obsoleted':
                continue
            job['name'] = job['name'].replace(snapshot, '')
            jobs.append(job)
        return jobs

    def _result2str(self, result):
        """Human-readable name for a QA_* result constant."""
        if result == QA_INPROGRESS:
            return 'inprogress'
        elif result == QA_FAILED:
            return 'failed'
        else:
            return 'passed'

    def find_failed_module(self, testmodules):
        """Return the name of the first fatal/important failed module, if any.

        Non-fatal failed modules are only logged.
        """
        for module in testmodules:
            if module['result'] != 'failed':
                continue
            flags = module['flags']
            if 'fatal' in flags or 'important' in flags:
                # fixed: an unreachable `break` used to follow this return
                return module['name']
            logger.info('%s %s %s' %
                        (module['name'], module['result'], module['flags']))

    def update_openqa_status_message(self):
        """Maintain the pinned 'Ignored issues' comment in the openQA job group."""
        url = makeurl(self.openqa_server,
                      ['api', 'v1', 'job_groups'])
        f = self.api.retried_GET(url)
        job_groups = json.load(f)
        group_id = 0
        for jg in job_groups:
            if jg['name'] == self.openqa_group():
                group_id = jg['id']
                break
        if not group_id:
            logger.debug('No openQA group id found for status comment update, ignoring')
            return

        pinned_ignored_issue = 0
        issues = ' , '.join(self.issues_to_ignore.keys())
        status_flag = 'publishing' if self.status_for_openqa['is_publishing'] else \
            'preparing' if self.status_for_openqa['can_release'] else \
            'testing' if self.status_for_openqa['snapshotable'] else \
            'building'
        status_msg = "tag:{}:{}:{}".format(self.status_for_openqa['new_snapshot'], status_flag, status_flag)
        msg = "pinned-description: Ignored issues\r\n\r\n{}\r\n\r\n{}".format(issues, status_msg)
        data = {'text': msg}

        url = makeurl(self.openqa_server,
                      ['api', 'v1', 'groups', str(group_id), 'comments'])
        f = self.api.retried_GET(url)
        comments = json.load(f)
        for comment in comments:
            if comment['userName'] == 'ttm' and \
                    comment['text'].startswith('pinned-description: Ignored issues'):
                pinned_ignored_issue = comment['id']

        logger.debug('Writing openQA status message: {}'.format(data))
        if not self.dryrun:
            if pinned_ignored_issue:
                self.openqa.openqa_request(
                    'PUT', 'groups/%s/comments/%d' % (group_id, pinned_ignored_issue), data=data)
            else:
                self.openqa.openqa_request(
                    'POST', 'groups/%s/comments' % group_id, data=data)

    def overall_result(self, snapshot):
        """Analyze the openQA jobs of a given snapshot Returns a QAResult"""
        if snapshot is None:
            return QA_FAILED

        jobs = self.find_openqa_results(snapshot)

        self.failed_relevant_jobs = []
        self.failed_ignored_jobs = []

        if len(jobs) < self.jobs_num():  # not yet scheduled
            logger.warning('we have only %s jobs' % len(jobs))
            return QA_INPROGRESS

        in_progress = False
        for job in jobs:
            if job['result'] in ('failed', 'incomplete', 'skipped', 'user_cancelled', 'obsoleted', 'parallel_failed'):
                # collect bug references and ttm labels from the job's comments
                url = makeurl(self.openqa_server,
                              ['api', 'v1', 'jobs', str(job['id']), 'comments'])
                f = self.api.retried_GET(url)
                comments = json.load(f)
                refs = set()
                labeled = 0
                to_ignore = False
                for comment in comments:
                    for ref in comment['bugrefs']:
                        refs.add(str(ref))
                    if comment['userName'] == 'ttm' and comment['text'] == 'label:unknown_failure':
                        labeled = comment['id']
                    if re.search(r'@ttm:? ignore', comment['text']):
                        to_ignore = True
                # to_ignore can happen with or without refs
                ignored = True if to_ignore else len(refs) > 0
                build_nr = str(job['settings']['BUILD'])
                for ref in refs:
                    if ref not in self.issues_to_ignore:
                        if to_ignore:
                            self.issues_to_ignore[ref] = build_nr
                            self.update_pinned_descr = True
                        else:
                            ignored = False
                    else:
                        # update reference
                        self.issues_to_ignore[ref] = build_nr

                if ignored:
                    self.failed_ignored_jobs.append(job['id'])
                    if labeled:
                        text = 'Ignored issue' if len(refs) > 0 else 'Ignored failure'
                        # remove flag - unfortunately can't delete comment unless admin
                        data = {'text': text}
                        if self.dryrun:
                            logger.info("Would label {} with: {}".format(job['id'], text))
                        else:
                            self.openqa.openqa_request(
                                'PUT', 'jobs/%s/comments/%d' % (job['id'], labeled), data=data)
                    logger.info("job %s failed, but was ignored", job['name'])
                else:
                    self.failed_relevant_jobs.append(job['id'])
                    if not labeled and len(refs) > 0:
                        data = {'text': 'label:unknown_failure'}
                        if self.dryrun:
                            logger.info("Would label {} as unknown".format(job['id']))
                        else:
                            self.openqa.openqa_request(
                                'POST', 'jobs/%s/comments' % job['id'], data=data)
                    joburl = '%s/tests/%s' % (self.openqa_server, job['id'])
                    logger.info("job %s failed, see %s", job['name'], joburl)
            elif job['result'] == 'passed' or job['result'] == 'softfailed':
                continue
            elif job['result'] == 'none':
                if job['state'] != 'cancelled':
                    in_progress = True
            else:
                raise Exception(job['result'])

        self.save_issues_to_ignore()

        if len(self.failed_relevant_jobs) > 0:
            return QA_FAILED
        if in_progress:
            return QA_INPROGRESS
        return QA_PASSED

    def all_repos_done(self, project, codes=None):
        """Check the build result of the project and only return True if all
        repos of that project are either published or unpublished
        """
        # coolo's experience says that 'finished' won't be
        # sufficient here, so don't try to add it :-)
        codes = ['published', 'unpublished'] if not codes else codes

        url = self.api.makeurl(
            ['build', project, '_result'], {'code': 'failed'})
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        ready = True
        for repo in root.findall('result'):
            # ignore ports. 'factory' is used by arm for repos that are not
            # meant to use the totest manager.
            if repo.get('repository') in ('ports', 'factory', 'images_staging'):
                continue
            if repo.get('dirty', '') == 'true':
                logger.info('%s %s %s -> %s' % (repo.get('project'),
                                                repo.get('repository'), repo.get('arch'), 'dirty'))
                ready = False
            if repo.get('code') not in codes:
                logger.info('%s %s %s -> %s' % (repo.get('project'),
                                                repo.get('repository'), repo.get('arch'), repo.get('code')))
                ready = False
        return ready

    def maxsize_for_package(self, package):
        """Maximum allowed ISO size for a product package, or None for no limit.

        Raises Exception for packages with no known size class.
        """
        if re.match(r'.*-mini-.*', package):
            return 737280000  # a CD needs to match

        if re.match(r'.*-dvd5-.*', package):
            return 4700372992  # a DVD needs to match

        if re.match(r'livecd-x11', package):
            return 681574400  # not a full CD

        if re.match(r'livecd-.*', package):
            return 999999999  # a GB stick

        if re.match(r'.*-(dvd9-dvd|cd-DVD)-.*', package):
            return 8539996159

        if re.match(r'.*-ftp-(ftp|POOL)-', package):
            return None

        # docker container has no size limit
        if re.match(r'opensuse-.*-image.*', package):
            return None

        if '-Addon-NonOss-ftp-ftp' in package:
            return None

        if 'JeOS' in package:
            return 4700372992

        raise Exception('No maxsize for {}'.format(package))

    def package_ok(self, project, package, repository, arch):
        """Checks one package in a project and returns True if it's succeeded
        """
        query = {'package': package, 'repository': repository, 'arch': arch}
        url = self.api.makeurl(['build', project, '_result'], query)
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        # [@code!='succeeded'] is not supported by ET
        failed = [status for status in root.findall("result/status") if status.get('code') != 'succeeded']

        if any(failed):
            logger.info(
                '%s %s %s %s -> %s' % (project, package, repository, arch, failed[0].get('code')))
            return False

        if not len(root.findall('result/status[@code="succeeded"]')):
            logger.info('No "succeeded" for %s %s %s %s' % (project, package, repository, arch))
            return False

        maxsize = self.maxsize_for_package(package)
        if not maxsize:
            return True

        url = self.api.makeurl(['build', project, repository, arch, package])
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        for binary in root.findall('binary'):
            if not binary.get('filename', '').endswith('.iso'):
                continue
            isosize = int(binary.get('size', 0))
            if isosize > maxsize:
                logger.error('%s %s %s %s: %s' % (
                    project, package, repository, arch, 'too large by %s bytes' % (isosize - maxsize)))
                return False

        return True

    def is_snapshottable(self):
        """Check various conditions required for factory to be snapshotable
        """
        if not self.all_repos_done(self.project):
            return False

        for product in self.ftp_products + self.main_products:
            if not self.package_ok(self.project, product, self.product_repo, self.product_arch):
                return False

        for product in self.image_products + self.container_products:
            for arch in product.archs:
                if not self.package_ok(self.project, product.package, self.product_repo, arch):
                    return False

        if len(self.livecd_products):
            if not self.all_repos_done('%s:Live' % self.project):
                return False

            for product in self.livecd_products:
                for arch in product.archs:
                    if not self.package_ok('%s:Live' % self.project, product.package,
                                           self.product_repo, arch):
                        return False

        return True

    def _release_package(self, project, package, set_release=None, repository=None,
                         target_project=None, target_repository=None):
        """Trigger an OBS 'release' of a single package (honours dryrun/norelease)."""
        query = {'cmd': 'release'}

        if set_release:
            query['setrelease'] = set_release

        if repository is not None:
            query['repository'] = repository

        if target_project is not None:
            # Both need to be set
            query['target_project'] = target_project
            query['target_repository'] = target_repository

        baseurl = ['source', project, package]

        url = self.api.makeurl(baseurl, query=query)
        if self.dryrun or self.norelease:
            logger.info("release %s/%s (%s)" % (project, package, query))
        else:
            self.api.retried_POST(url)

    def _release(self, set_release=None):
        """Release every configured product into the test project."""
        for product in self.ftp_products:
            self._release_package(self.project, product, repository=self.product_repo)

        for cd in self.livecd_products:
            self._release_package('%s:Live' %
                                  self.project, cd.package, set_release=set_release,
                                  repository=self.livecd_repo)

        for image in self.image_products:
            self._release_package(self.project, image.package, set_release=set_release,
                                  repository=self.product_repo)

        for cd in self.main_products:
            self._release_package(self.project, cd, set_release=set_release,
                                  repository=self.product_repo)

        for container in self.container_products:
            # Containers are built in the same repo as other image products,
            # but released into a different repo in :ToTest
            self._release_package(self.project, container.package, repository=self.product_repo,
                                  target_project=self.test_project,
                                  target_repository=self.totest_container_repo)

    def update_totest(self, snapshot=None):
        """Disable publishing in :ToTest and release the new snapshot into it."""
        release = 'Snapshot%s' % snapshot if snapshot else None
        logger.info('Updating snapshot %s' % snapshot)
        if not (self.dryrun or self.norelease):
            self.api.switch_flag_in_prj(self.test_project, flag='publish', state='disable',
                                        repository=self.product_repo)

        self._release(set_release=release)

    def publish_factory_totest(self):
        """Enable publishing of the test project and release containers from it."""
        logger.info('Publish test project content')
        if not (self.dryrun or self.norelease):
            self.api.switch_flag_in_prj(
                self.test_project, flag='publish', state='enable',
                repository=self.product_repo)
        if self.container_products:
            logger.info('Releasing container products from ToTest')
            for container in self.container_products:
                self._release_package(self.test_project, container.package,
                                      repository=self.totest_container_repo)

    def totest_is_publishing(self):
        """Find out if the publishing flag is set in totest's _meta"""
        url = self.api.makeurl(
            ['source', self.test_project, '_meta'])
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        publish = root.find('publish')
        # explicit check instead of element truthiness: an Element is falsy
        # when it has no children, which made the old `if not root.find(...)`
        # only work by accident (and trips ElementTree's deprecation warning)
        if publish is None or len(publish) == 0:  # default true
            return True

        for flag in publish:
            if flag.get('repository', None) not in [None, self.product_repo]:
                continue
            if flag.get('arch', None):
                continue
            if flag.tag == 'enable':
                return True
        return False

    def totest(self):
        """One iteration of the ToTest state machine: test, release, publish."""
        try:
            current_snapshot = self.get_current_snapshot()
        except NotFoundException as e:
            # nothing in test project (yet)
            logger.warning(e)  # logger.warn is a deprecated alias
            current_snapshot = None
        new_snapshot = self.current_version()
        self.update_pinned_descr = False
        current_result = self.overall_result(current_snapshot)
        current_qa_version = self.current_qa_version()

        logger.info('current_snapshot %s: %s' %
                    (current_snapshot, self._result2str(current_result)))
        logger.debug('new_snapshot %s', new_snapshot)
        logger.debug('current_qa_version %s', current_qa_version)

        snapshotable = self.is_snapshottable()
        logger.debug("snapshotable: %s", snapshotable)
        can_release = ((current_snapshot is None or current_result != QA_INPROGRESS) and snapshotable)

        # not overwriting
        if new_snapshot == current_qa_version:
            logger.debug("no change in snapshot version")
            can_release = False
        elif not self.all_repos_done(self.test_project):
            logger.debug("not all repos done, can't release")
            # the repos have to be done, otherwise we better not touch them
            # with a new release
            can_release = False

        self.send_amqp_event(current_snapshot, current_result)

        can_publish = (current_result == QA_PASSED)

        # already published
        totest_is_publishing = self.totest_is_publishing()
        if totest_is_publishing:
            logger.debug("totest already publishing")
            can_publish = False

        if self.update_pinned_descr:
            self.status_for_openqa = {
                'current_snapshot': current_snapshot,
                'new_snapshot': new_snapshot,
                'snapshotable': snapshotable,
                'can_release': can_release,
                'is_publishing': totest_is_publishing,
            }
            self.update_openqa_status_message()

        if can_publish:
            if current_qa_version == current_snapshot:
                self.publish_factory_totest()
                self.write_version_to_dashboard("snapshot", current_snapshot)
                can_release = False  # we have to wait
            else:
                # We reached a very bad status: openQA testing is 'done', but not of the same version
                # currently in test project. This can happen when 'releasing' the
                # product failed
                raise Exception("Publishing stopped: tested version (%s) does not match version in test project (%s)"
                                % (current_qa_version, current_snapshot))

        if can_release:
            self.update_totest(new_snapshot)
            self.write_version_to_dashboard("totest", new_snapshot)

    def send_amqp_event(self, current_snapshot, current_result):
        """Publish a build status event on the configured AMQP bus (best effort)."""
        if not self.amqp_url:
            logger.debug('No ttm_amqp_url configured in oscrc - skipping amqp event emission')
            return

        logger.debug('Sending AMQP message')
        inf = re.sub(r"ed$", '', self._result2str(current_result))
        msg_topic = '%s.ttm.build.%s' % (self.project_base.lower(), inf)
        msg_body = json.dumps({
            'build': current_snapshot,
            'project': self.project,
            'failed_jobs': {
                'relevant': self.failed_relevant_jobs,
                'ignored': self.failed_ignored_jobs,
            }
        })

        # send amqp event
        tries = 7  # arbitrary
        for t in range(tries):
            try:
                notify_connection = pika.BlockingConnection(pika.URLParameters(self.amqp_url))
                notify_channel = notify_connection.channel()
                notify_channel.exchange_declare(exchange='pubsub', exchange_type='topic', passive=True, durable=True)
                notify_channel.basic_publish(exchange='pubsub', routing_key=msg_topic, body=msg_body)
                notify_connection.close()
                break
            except pika.exceptions.ConnectionClosed as e:
                # logger.warn is a deprecated alias for warning
                logger.warning('Sending AMQP event did not work: %s. Retrying try %s out of %s' % (e, t, tries))
        else:
            logger.error('Could not send out AMQP event for %s tries, aborting.' % tries)

    def release(self):
        """Unconditionally release the current version into :ToTest."""
        new_snapshot = self.current_version()
        self.update_totest(new_snapshot)

    def write_version_to_dashboard(self, target, version):
        """Record the released version in the dashboard pseudometa file."""
        if not (self.dryrun or self.norelease):
            self.api.pseudometa_file_ensure('version_%s' % target, version, comment='Update version')
2015-02-20 13:18:09 +01:00
2017-10-10 08:26:21 +02:00
class ToTestBaseNew(ToTestBase):
    """Base class for new product builder"""

    # whether all medias need to have the same build number
    need_same_build_number = True
    # whether to set a snapshot number on release
    set_snapshot_number = False

    def _release(self, set_release=None):
        """Release the single 000product package; the new builder bundles all media."""
        request = {'cmd': 'release'}
        if set_release:
            request['setrelease'] = set_release
        release_url = self.api.makeurl(['source', self.project, '000product'], query=request)
        if self.dryrun or self.norelease:
            logger.info("release %s/%s (%s)" % (self.project, '000product', set_release))
        else:
            self.api.retried_POST(release_url)

        # XXX still legacy
        for live in self.livecd_products:
            self._release_package('%s:Live' %
                                  self.project, live.package, set_release=set_release)

    def release_version(self):
        """Version of the 000product release package in the standard repository."""
        url = self.api.makeurl(['build', self.project, 'standard', self.arch(),
                                '000product:%s-release' % self.project_base])
        handle = self.api.retried_GET(url)
        tree = ET.parse(handle).getroot()
        for node in tree.findall('binary'):
            fname = node.get('filename', '')
            matched = re.match(r'.*-([^-]*)-[^-]*.src.rpm', fname)
            if matched:
                return matched.group(1)

        raise NotFoundException("can't find %s release version" % self.project)

    def current_version(self):
        """Build number of the first main product ISO."""
        return self.iso_build_version(self.project, self.main_products[0])

    def is_snapshottable(self):
        """Like the base check, but additionally require one common build number."""
        ok = super(ToTestBaseNew, self).is_snapshottable()
        if ok and self.need_same_build_number:
            # make sure all medias have the same build number
            numbers = set()
            for tree in self.ftp_products:
                if 'Addon-NonOss' in tree:
                    # XXX: don't care about nonoss atm.
                    continue
                numbers.add(self.ftp_build_version(self.project, tree))
            for medium in self.main_products:
                numbers.add(self.iso_build_version(self.project, medium))
            for medium in self.livecd_products + self.image_products:
                for arch in medium.archs:
                    numbers.add(self.iso_build_version(self.project, medium.package,
                                                       arch=arch))

            ok = (len(numbers) == 1)
            if ok is False:
                logger.debug("not all medias have the same build number")

        return ok

    def update_totest(self, snapshot):
        # omit snapshot, we don't want to rename on release
        if not self.set_snapshot_number:
            snapshot = None
        super(ToTestBaseNew, self).update_totest(snapshot)
2017-10-10 08:26:21 +02:00
class ToTestFactory(ToTestBase):
    """openSUSE Tumbleweed (Factory) release driver for x86."""

    main_products = [
        '000product:openSUSE-dvd5-dvd-i586',
        '000product:openSUSE-dvd5-dvd-x86_64',
        '000product:openSUSE-cd-mini-i586',
        '000product:openSUSE-cd-mini-x86_64',
        '000product:openSUSE-Tumbleweed-Kubic-dvd5-dvd-x86_64',
    ]

    ftp_products = [
        '000product:openSUSE-ftp-ftp-i586_x86_64',
        '000product:openSUSE-Addon-NonOss-ftp-ftp-i586_x86_64',
    ]

    livecd_products = [
        ImageProduct('livecd-tumbleweed-kde', ['i586', 'x86_64']),
        ImageProduct('livecd-tumbleweed-gnome', ['i586', 'x86_64']),
        ImageProduct('livecd-tumbleweed-x11', ['i586', 'x86_64']),
    ]

    container_products = [
        ImageProduct('opensuse-tumbleweed-image:docker', ['i586', 'x86_64']),
        ImageProduct('kubic-kured-image', ['x86_64']),
        ImageProduct('kubic-pause-image', ['i586', 'x86_64']),
    ]

    image_products = [
        ImageProduct('opensuse-tumbleweed-image:lxc', ['i586', 'x86_64']),
        ImageProduct('openSUSE-Tumbleweed-JeOS:MS-HyperV', ['x86_64']),
        ImageProduct('openSUSE-Tumbleweed-JeOS:OpenStack-Cloud', ['x86_64']),
        ImageProduct('openSUSE-Tumbleweed-JeOS:VMware', ['x86_64']),
        ImageProduct('openSUSE-Tumbleweed-JeOS:XEN', ['x86_64']),
        ImageProduct('openSUSE-Tumbleweed-JeOS:kvm-and-xen', ['x86_64']),
    ]

    def __init__(self, *args, **kwargs):
        super(ToTestFactory, self).__init__(*args, **kwargs)

    def openqa_group(self):
        return 'openSUSE Tumbleweed'

    def iso_prefix(self):
        return 'Tumbleweed'

    def arch(self):
        return 'x86_64'
class ToTestFactoryPowerPC(ToTestBase):
    """Tumbleweed PowerPC driver; only ppc64le media are released (ppc64 dropped, see boo#1112920)."""

    main_products = [
        '000product:openSUSE-dvd5-dvd-ppc64le',
        '000product:openSUSE-cd-mini-ppc64le',
    ]

    ftp_products = ['000product:openSUSE-ftp-ftp-ppc64_ppc64le']

    image_products = [ImageProduct('opensuse-tumbleweed-image:lxc', ['ppc64le'])]

    container_products = [
        ImageProduct('opensuse-tumbleweed-image:docker', ['ppc64le']),
        ImageProduct('kubic-kured-image', ['ppc64le']),
        ImageProduct('kubic-pause-image', ['ppc64le']),
    ]

    def __init__(self, *args, **kwargs):
        super(ToTestFactoryPowerPC, self).__init__(*args, **kwargs)

    def openqa_group(self):
        return 'openSUSE Tumbleweed PowerPC'

    def arch(self):
        return 'ppc64le'

    def iso_prefix(self):
        return 'Tumbleweed'

    def jobs_num(self):
        return 4
2017-01-20 11:46:42 +01:00
class ToTestFactoryzSystems(ToTestBase):
    """Tumbleweed s390x release driver."""

    main_products = [
        '000product:openSUSE-dvd5-dvd-s390x',
        '000product:openSUSE-cd-mini-s390x',
    ]

    ftp_products = ['000product:openSUSE-ftp-ftp-s390x']

    def __init__(self, *args, **kwargs):
        super(ToTestFactoryzSystems, self).__init__(*args, **kwargs)

    def openqa_group(self):
        return 'openSUSE Tumbleweed s390x'

    def arch(self):
        return 's390x'

    def iso_prefix(self):
        return 'Tumbleweed'

    def jobs_num(self):
        return 1
2017-01-20 11:46:42 +01:00
2015-04-08 15:24:36 +02:00
class ToTestFactoryARM(ToTestFactory):
    """Tumbleweed ARM release driver (aarch64 plus armv6/armv7 ftp trees)."""

    main_products = [
        '000product:openSUSE-cd-mini-aarch64',
        '000product:openSUSE-dvd5-dvd-aarch64',
        '000product:openSUSE-Tumbleweed-Kubic-dvd5-dvd-aarch64',
    ]

    ftp_products = [
        '000product:openSUSE-ftp-ftp-aarch64',
        '000product:openSUSE-ftp-ftp-armv7hl',
        '000product:openSUSE-ftp-ftp-armv6hl',
    ]

    livecd_products = [ImageProduct('JeOS', ['armv7l'])]

    image_products = [ImageProduct('opensuse-tumbleweed-image:lxc', ['armv6l', 'armv7l', 'aarch64'])]

    container_products = [
        ImageProduct('opensuse-tumbleweed-image:docker', ['aarch64']),
        ImageProduct('kubic-kured-image', ['aarch64']),
        ImageProduct('kubic-pause-image', ['aarch64']),
    ]

    # JeOS doesn't follow build numbers of main isos
    need_same_build_number = False

    def __init__(self, *args, **kwargs):
        super(ToTestFactoryARM, self).__init__(*args, **kwargs)

    def openqa_group(self):
        return 'openSUSE Tumbleweed AArch64'

    def arch(self):
        return 'aarch64'

    def jobs_num(self):
        return 2
2015-02-20 13:18:09 +01:00
2018-06-20 17:16:22 +02:00
class ToTest151(ToTestBaseNew):
    """openSUSE Leap 15 (x86_64) release driver using the new product builder."""

    main_products = [
        '000product:openSUSE-cd-mini-x86_64',
        '000product:openSUSE-dvd5-dvd-x86_64',
    ]

    ftp_products = [
        '000product:openSUSE-ftp-ftp-x86_64',
        '000product:openSUSE-Addon-NonOss-ftp-ftp-x86_64',
    ]

    def openqa_group(self):
        return 'openSUSE Leap 15'

    def get_current_snapshot(self):
        """Build number of the first main product ISO in the :ToTest subproject."""
        return self.iso_build_version(self.project + ':ToTest', self.main_products[0])
class ToTest151ARM(ToTest151):
    """Leap 15 AArch64 variant."""

    main_products = [
        '000product:openSUSE-cd-mini-aarch64',
        '000product:openSUSE-dvd5-dvd-aarch64',
    ]

    ftp_products = [
        '000product:openSUSE-ftp-ftp-aarch64',
        '000product:openSUSE-ftp-ftp-armv7hl',
    ]

    livecd_products = [ImageProduct('JeOS', ['armv7l'])]

    # JeOS doesn't follow build numbers of main isos
    need_same_build_number = False

    def openqa_group(self):
        return 'openSUSE Leap 15 AArch64'

    def jobs_num(self):
        return 10
class ToTest151PowerPC(ToTest151):
    """Leap 15 PowerPC (ppc64le) variant."""

    main_products = [
        '000product:openSUSE-cd-mini-ppc64le',
        '000product:openSUSE-dvd5-dvd-ppc64le',
    ]

    ftp_products = ['000product:openSUSE-ftp-ftp-ppc64le']

    def openqa_group(self):
        return 'openSUSE Leap 15 PowerPC'

    def jobs_num(self):
        return 10
2017-12-17 10:00:24 +01:00
class ToTest150Ports(ToTestBaseNew):
    """Leap 15.0 Ports (AArch64) driver."""

    main_products = [
        '000product:openSUSE-cd-mini-aarch64',
        '000product:openSUSE-dvd5-dvd-aarch64',
    ]

    ftp_products = [
        '000product:openSUSE-ftp-ftp-aarch64',
        '000product:openSUSE-ftp-ftp-armv7hl',
    ]

    # Leap 15.0 Ports still need to update snapshot
    set_snapshot_number = True

    # product_repo openqa_group jobs_num values are specific to aarch64
    # TODO: How to handle the other entries of main_products ?
    product_repo = 'images_arm'

    def openqa_group(self):
        return 'openSUSE Leap 15.0 AArch64'

    def jobs_num(self):
        return 10

    def get_current_snapshot(self):
        """Build number of the first main product ISO in the :ToTest subproject."""
        return self.iso_build_version(self.project + ':ToTest', self.main_products[0])
class ToTest150Images(ToTestBaseNew):
    """ToTest handling for the openSUSE Leap 15.0 standalone images."""

    image_products = [
        ImageProduct('livecd-leap-gnome', ['x86_64']),
        ImageProduct('livecd-leap-kde', ['x86_64']),
        ImageProduct('livecd-leap-x11', ['x86_64']),
        ImageProduct('opensuse-leap-image:docker', ['x86_64']),
        ImageProduct('opensuse-leap-image:lxc', ['x86_64']),
        ImageProduct('kiwi-templates-Leap15-JeOS:MS-HyperV', ['x86_64']),
        ImageProduct('kiwi-templates-Leap15-JeOS:OpenStack-Cloud', ['x86_64']),
        ImageProduct('kiwi-templates-Leap15-JeOS:VMware', ['x86_64']),
        ImageProduct('kiwi-templates-Leap15-JeOS:XEN', ['x86_64']),
        ImageProduct('kiwi-templates-Leap15-JeOS:kvm-and-xen', ['x86_64']),
    ]

    # docker image has a different number
    need_same_build_number = False
    set_snapshot_number = True

    def openqa_group(self):
        """openQA job group polled for the image tests."""
        return 'openSUSE Leap 15.0 Images'

    def current_qa_version(self):
        """Version currently under QA, tracked in an images-specific pseudometa file."""
        return self.api.pseudometa_file_load('version_totest_images')

    def write_version_to_dashboard(self, target, version):
        """Store *version* under an images-specific dashboard key."""
        super(ToTest150Images, self).write_version_to_dashboard('{}_images'.format(target), version)

    def current_version(self):
        """Build number of the first image product in the devel project."""
        first = self.image_products[0]
        return self.iso_build_version(self.project, first.package, arch=first.archs[0])

    def get_current_snapshot(self):
        """Build number of the first image product in the :ToTest subproject."""
        first = self.image_products[0]
        return self.iso_build_version(self.project + ':ToTest', first.package,
                                      arch=first.archs[0])

    def _release(self, set_release=None):
        # Intentionally bypass ToTestBaseNew and use the base implementation.
        ToTestBase._release(self, set_release)

    def jobs_num(self):
        """Number of openQA jobs expected for this group."""
        return 13
2017-12-17 10:00:24 +01:00
2018-06-20 17:16:22 +02:00
class ToTest151Images(ToTest150Images):
    """ToTest handling for the openSUSE Leap 15.1 standalone images."""

    image_products = [
        ImageProduct('livecd-leap-gnome', ['x86_64']),
        ImageProduct('livecd-leap-kde', ['x86_64']),
        ImageProduct('livecd-leap-x11', ['x86_64']),
        ImageProduct('opensuse-leap-image:lxc', ['x86_64']),
        ImageProduct('openSUSE-Leap-15.1-JeOS:MS-HyperV', ['x86_64']),
        ImageProduct('openSUSE-Leap-15.1-JeOS:OpenStack-Cloud', ['x86_64']),
        ImageProduct('openSUSE-Leap-15.1-JeOS:VMware', ['x86_64']),
        ImageProduct('openSUSE-Leap-15.1-JeOS:XEN', ['x86_64']),
        ImageProduct('openSUSE-Leap-15.1-JeOS:kvm-and-xen', ['x86_64']),
    ]

    # Unlike 15.0, the docker image is released as a container product here.
    container_products = [ImageProduct('opensuse-leap-image:docker', ['x86_64'])]

    def openqa_group(self):
        """openQA job group polled for the image tests."""
        return 'openSUSE Leap 15.1 Images'
2018-07-16 16:51:08 +02:00
class ToTestSLE(ToTestBaseNew):
    """Common ToTest behaviour shared by all SUSE Linux Enterprise products."""

    def __init__(self, *args, **kwargs):
        # SLE releases its media into the :TEST subproject rather than :ToTest.
        ToTestBaseNew.__init__(self, test_subproject='TEST', *args, **kwargs)

    def openqa_group(self):
        """openQA job group polled for SLE."""
        return 'Functional'

    def get_current_snapshot(self):
        """Build number of the first medium in the :TEST subproject."""
        test_project = self.project + ':TEST'
        return self.iso_build_version(test_project, self.main_products[0])

    def ftp_build_version(self, project, tree):
        # SLE build identifiers use the 'SLE' base instead of the default.
        return super(ToTestSLE, self).ftp_build_version(project, tree, base='SLE')

    def iso_build_version(self, project, tree):
        # SLE build identifiers use the 'SLE' base instead of the default.
        return super(ToTestSLE, self).iso_build_version(project, tree, base='SLE')
2018-07-16 16:51:08 +02:00
class ToTestSLE12(ToTestSLE):
    """Media definitions for SLE 12 service packs (built from the _product package)."""

    # DVD installation media, one per supported architecture.
    main_products = [
        '_product:SLES-dvd5-DVD-aarch64',
        '_product:SLES-dvd5-DVD-ppc64le',
        '_product:SLES-dvd5-DVD-s390x',
        '_product:SLES-dvd5-DVD-x86_64',
    ]
    # FTP (POOL repo) trees, one per supported architecture.
    ftp_products = [
        '_product:SLES-ftp-POOL-aarch64',
        '_product:SLES-ftp-POOL-ppc64le',
        '_product:SLES-ftp-POOL-s390x',
        '_product:SLES-ftp-POOL-x86_64',
    ]
class ToTestSLE15(ToTestSLE):
    """Media definitions for SLE 15 GA/SP1 (built from the 000product package)."""

    # CD installation media, one per supported architecture.
    main_products = [
        '000product:SLES-cd-DVD-aarch64',
        '000product:SLES-cd-DVD-ppc64le',
        '000product:SLES-cd-DVD-s390x',
        '000product:SLES-cd-DVD-x86_64',
    ]
    # FTP (POOL repo) trees, one per supported architecture.
    ftp_products = [
        '000product:SLES-ftp-POOL-aarch64',
        '000product:SLES-ftp-POOL-ppc64le',
        '000product:SLES-ftp-POOL-s390x',
        '000product:SLES-ftp-POOL-x86_64',
    ]
2017-02-02 17:41:24 +01:00
class CommandlineInterface(cmdln.Cmdln):
    """cmdln-based CLI that dispatches each OBS project to its ToTest* handler."""

    def __init__(self, *args, **kwargs):
        # BUGFIX: args/kwargs were previously passed as two positional values
        # (cmdln.Cmdln.__init__(self, args, kwargs)) instead of being unpacked.
        cmdln.Cmdln.__init__(self, *args, **kwargs)

        # Map of OBS project name -> handler class implementing its policy.
        self.totest_class = {
            'openSUSE:Factory': ToTestFactory,
            'openSUSE:Factory:PowerPC': ToTestFactoryPowerPC,
            'openSUSE:Factory:ARM': ToTestFactoryARM,
            'openSUSE:Factory:zSystems': ToTestFactoryzSystems,
            'openSUSE:Leap:15.1': ToTest151,
            'openSUSE:Leap:15.1:ARM': ToTest151ARM,
            'openSUSE:Leap:15.1:PowerPC': ToTest151PowerPC,
            'openSUSE:Leap:15.0:Ports': ToTest150Ports,
            'openSUSE:Leap:15.0:Images': ToTest150Images,
            'openSUSE:Leap:15.1:Images': ToTest151Images,
            'SUSE:SLE-12-SP4:GA': ToTestSLE12,
            'SUSE:SLE-15:GA': ToTestSLE15,
            'SUSE:SLE-15-SP1:GA': ToTestSLE15,
        }
        # Server defaults keyed by the first component of the project name.
        self.openqa_server = {
            'openSUSE': 'https://openqa.opensuse.org',
            'SUSE': 'https://openqa.suse.de',
        }
        self.api_url = {
            'openSUSE': 'https://api.opensuse.org',
            'SUSE': 'https://api.suse.de',
        }

    def get_optparser(self):
        """Build the option parser shared by all subcommands."""
        parser = cmdln.CmdlnOptionParser(self)
        parser.add_option("--dry", action="store_true", help="dry run")
        parser.add_option("--debug", action="store_true", help="debug output")
        parser.add_option("--release", action="store_true", help="trigger release in build service (default for openSUSE)")
        parser.add_option("--norelease", action="store_true", help="do not trigger release in build service (default for SLE)")
        parser.add_option("--verbose", action="store_true", help="verbose")
        parser.add_option(
            "--osc-debug", action="store_true", help="osc debug output")
        parser.add_option(
            "--openqa-server", help="""Full URL to the openQA server that should be queried, default based on project selection, e.g.
            'https://openqa.opensuse.org' for 'openSUSE'""")
        parser.add_option(
            "--obs-api-url", help="""Full URL to OBS instance to be queried, default based on project selection, e.g.
            'https://api.opensuse.org' for 'openSUSE'""")
        return parser

    def postoptparse(self):
        """Configure logging and osc from the parsed global options."""
        level = None
        if self.options.debug:
            level = logging.DEBUG
        elif self.options.verbose:
            level = logging.INFO

        fmt = '%(module)s:%(lineno)d %(levelname)s %(message)s'
        if os.isatty(0):
            # Interactive run: prefix log lines with a timestamp.
            fmt = '%(asctime)s - ' + fmt

        logging.basicConfig(level=level, format=fmt)

        osc.conf.get_config()
        if self.options.osc_debug:
            osc.conf.config['debug'] = True

    def _setup_totest(self, project):
        """Instantiate the handler class for *project*.

        Fills in server defaults based on the project namespace and derives
        the release/norelease default (release by default only for openSUSE).
        Raises cmdln.CmdlnUserError for unknown projects.
        """
        # Allow a short name like 'Factory' by prepending the openSUSE namespace.
        fallback_project = 'openSUSE:%s' % project
        if project not in self.totest_class and fallback_project in self.totest_class:
            project = fallback_project
        project_base = project.split(':')[0]
        if not self.options.openqa_server:
            self.options.openqa_server = self.openqa_server[project_base]
        if not self.options.obs_api_url:
            self.options.obs_api_url = self.api_url[project_base]

        Config(self.options.obs_api_url, project)
        if project not in self.totest_class:
            msg = 'Project %s not recognized. Possible values [%s]' % (
                project, ', '.join(self.totest_class))
            raise cmdln.CmdlnUserError(msg)

        if self.options.release:
            release = True
        elif self.options.norelease:
            release = False
        else:
            release = (project_base == 'openSUSE')

        return self.totest_class[project](project, self.options.dry, not release, self.options.obs_api_url, self.options.openqa_server)

    @cmdln.option('-n', '--interval', metavar="minutes", type="int", help="periodic interval in minutes")
    def do_run(self, subcmd, opts, project='openSUSE:Factory'):
        """${cmd_name}: run the ToTest Manager

        ${cmd_usage}
        ${cmd_option_list}
        """

        class ExTimeout(Exception):
            """raised on timeout"""

        if opts.interval:
            def alarm_called(nr, frame):
                raise ExTimeout()
            signal.signal(signal.SIGALRM, alarm_called)

        while True:
            try:
                totest = self._setup_totest(project)
                totest.totest()
            except Exception as e:
                # Log the full traceback so periodic failures can be diagnosed,
                # but keep the daemon loop alive.
                logger.exception(e)

            if opts.interval:
                if os.isatty(0):
                    # Interactive: allow the operator to skip the wait by
                    # pressing enter; SIGALRM aborts the blocked raw_input.
                    logger.info(
                        "sleeping %d minutes. Press enter to check now ..." % opts.interval)
                    signal.alarm(opts.interval * 60)
                    try:
                        raw_input()
                    except ExTimeout:
                        pass
                    signal.alarm(0)
                    logger.info("recheck at %s" %
                                datetime.datetime.now().isoformat())
                else:
                    logger.info("sleeping %d minutes." % opts.interval)
                    time.sleep(opts.interval * 60)
                continue
            break

    def do_release(self, subcmd, opts, project='openSUSE:Factory'):
        """${cmd_name}: manually release all media. Use with caution!

        ${cmd_usage}
        ${cmd_option_list}
        """
        totest = self._setup_totest(project)

        totest.release()
if __name__ == "__main__":
    # Run the CLI and propagate its exit status to the shell.
    sys.exit(CommandlineInterface().main())