2017-10-16 23:18:16 +02:00
|
|
|
#!/usr/bin/python2
|
2014-05-27 11:14:35 +02:00
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
#
|
|
|
|
# (C) 2014 mhrusecky@suse.cz, openSUSE.org
|
|
|
|
# (C) 2014 tchvatal@suse.cz, openSUSE.org
|
|
|
|
# (C) 2014 aplanas@suse.de, openSUSE.org
|
|
|
|
# (C) 2014 coolo@suse.de, openSUSE.org
|
2017-11-14 23:42:24 +01:00
|
|
|
# (C) 2017 okurz@suse.de, openSUSE.org
|
2014-05-27 11:14:35 +02:00
|
|
|
# Distribute under GPLv2 or GPLv3
|
|
|
|
|
2015-04-14 13:39:48 +02:00
|
|
|
import cmdln
|
2015-04-29 13:32:04 +02:00
|
|
|
import datetime
|
2014-09-12 11:42:42 +02:00
|
|
|
import json
|
2014-05-27 11:14:35 +02:00
|
|
|
import os
|
2014-09-12 11:42:42 +02:00
|
|
|
import re
|
2014-05-27 11:14:35 +02:00
|
|
|
import sys
|
2014-09-12 15:05:57 +02:00
|
|
|
import urllib2
|
2015-04-14 13:39:48 +02:00
|
|
|
import logging
|
2015-04-20 15:00:02 +02:00
|
|
|
import signal
|
2017-01-26 17:05:21 +01:00
|
|
|
import time
|
2014-09-12 15:05:57 +02:00
|
|
|
|
2014-09-12 11:42:42 +02:00
|
|
|
from xml.etree import cElementTree as ET
|
2017-03-29 14:24:11 +02:00
|
|
|
from openqa_client.client import OpenQA_Client
|
2014-05-27 11:14:35 +02:00
|
|
|
|
2014-09-12 11:42:42 +02:00
|
|
|
import osc
|
2014-05-28 15:14:58 +02:00
|
|
|
|
2015-02-20 13:18:09 +01:00
|
|
|
from osclib.conf import Config
|
2014-05-27 11:14:35 +02:00
|
|
|
from osclib.stagingapi import StagingAPI
|
2015-04-08 10:06:02 +02:00
|
|
|
from osc.core import makeurl
|
2014-05-27 11:14:35 +02:00
|
|
|
|
2017-10-10 08:26:21 +02:00
|
|
|
# Module-wide logger; the tool's entry point is expected to configure handlers.
logger = logging.getLogger()

# Per-project file name suffix storing bug references that should not block
# a snapshot release (one reference per line).
ISSUE_FILE = 'issues_to_ignore'

# QA Results
QA_INPROGRESS = 1
QA_FAILED = 2
QA_PASSED = 3
|
|
|
|
|
|
|
|
|
2017-10-10 08:26:21 +02:00
|
|
|
class NotFoundException(Exception):
    """Raised when an expected build artifact or version string cannot be found."""
    pass
|
|
|
|
|
|
|
|
|
2014-09-12 11:42:42 +02:00
|
|
|
class ToTestBase(object):

    """Base class to store the basic interface"""

    # OBS repository/arch where the release products are built
    product_repo = 'images'
    product_arch = 'local'
    # Live CDs are built in a separate '<project>:Live' subproject
    livecd_repo = 'images'
    livecd_archs = ['i586', 'x86_64']
|
|
|
|
|
2017-11-14 23:42:24 +01:00
|
|
|
    def __init__(self, project, dryrun=False, api_url=None, openqa_server='https://openqa.opensuse.org', test_subproject=None):
        """Set up OBS (StagingAPI) and openQA clients for *project*.

        :param project: OBS project to manage (e.g. 'openSUSE:Factory')
        :param dryrun: when True, state-changing calls only log
        :param api_url: OBS API URL; falls back to osc's configured apiurl
        :param openqa_server: base URL of the openQA instance
        :param test_subproject: subproject suffix for the test project
                                (defaults to 'ToTest')
        """
        self.project = project
        self.dryrun = dryrun
        if not api_url:
            api_url = osc.conf.config['apiurl']
        self.api = StagingAPI(api_url, project=project)
        self.openqa_server = openqa_server
        if not test_subproject:
            test_subproject = 'ToTest'
        self.test_project = '%s:%s' % (self.project, test_subproject)
        self.openqa = OpenQA_Client(server=openqa_server)
        # Bug references that should not block a release; persisted in a
        # per-project file in the current working directory.
        self.issues_to_ignore = []
        self.issuefile = "{}_{}".format(self.project, ISSUE_FILE)
        if os.path.isfile(self.issuefile):
            with open(self.issuefile, 'r') as f:
                for line in f.readlines():
                    self.issues_to_ignore.append(line.strip())
        # First component of the project name, e.g. 'openSUSE'
        self.project_base = project.split(':')[0]
        # Set to True when issues_to_ignore changed and the pinned openQA
        # group description needs refreshing.
        self.update_pinned_descr = False
|
2014-09-12 11:42:42 +02:00
|
|
|
|
2015-04-08 10:06:02 +02:00
|
|
|
def openqa_group(self):
|
2014-09-12 15:05:57 +02:00
|
|
|
return self.project
|
|
|
|
|
2014-11-11 12:57:10 +01:00
|
|
|
def iso_prefix(self):
|
|
|
|
return self.project
|
|
|
|
|
2015-04-07 14:11:57 +02:00
|
|
|
def jobs_num(self):
|
2015-10-20 06:51:40 +02:00
|
|
|
return 70
|
2015-04-07 14:11:57 +02:00
|
|
|
|
2015-05-03 16:08:22 +02:00
|
|
|
def current_version(self):
|
2015-05-14 08:34:45 +02:00
|
|
|
return self.release_version()
|
2015-05-03 16:08:22 +02:00
|
|
|
|
2014-09-12 15:05:57 +02:00
|
|
|
    def binaries_of_product(self, project, product):
        """List the binary file names of *product* in *project*.

        Queries the product repo/arch of the build service; returns an
        empty list when the build results are not available (HTTP error).
        """
        url = self.api.makeurl(['build', project, self.product_repo, self.product_arch, product])
        try:
            f = self.api.retried_GET(url)
        except urllib2.HTTPError:
            # product not built (yet) - treat as "no binaries"
            return []

        ret = []
        root = ET.parse(f).getroot()
        for binary in root.findall('binary'):
            ret.append(binary.get('filename'))

        return ret
|
|
|
|
|
2014-09-12 11:42:42 +02:00
|
|
|
    def get_current_snapshot(self):
        """Return the current snapshot in the test project"""

        # The snapshot number is parsed out of the NET installer iso name
        # of the mini-cd product, e.g. ...-NET-x86_64-Snapshot20180101-Media.iso
        for binary in self.binaries_of_product(self.test_project, '_product:%s-cd-mini-%s' % (self.project_base, self.arch())):
            result = re.match(r'%s-%s-NET-.*-Snapshot(.*)-Media.iso' % (self.project_base, self.iso_prefix()),
                              binary)
            if result:
                return result.group(1)

        # no matching iso found - nothing released to the test project yet
        return None
|
|
|
|
|
2017-11-14 23:42:24 +01:00
|
|
|
def ftp_build_version(self, project, tree, base=None):
|
|
|
|
if not base:
|
|
|
|
base = self.project_base
|
|
|
|
for binary in self.binaries_of_product(project, tree):
|
|
|
|
result = re.match(r'%s.*Build(.*)-Media1.report' % base, binary)
|
2017-02-02 17:41:24 +01:00
|
|
|
if result:
|
|
|
|
return result.group(1)
|
2017-10-10 08:26:21 +02:00
|
|
|
raise NotFoundException("can't find %s ftp version" % project)
|
2015-05-03 16:08:22 +02:00
|
|
|
|
2017-11-14 23:42:24 +01:00
|
|
|
def iso_build_version(self, project, tree, base=None):
|
|
|
|
if not base:
|
|
|
|
base = self.project_base
|
|
|
|
for binary in self.binaries_of_product(project, tree):
|
|
|
|
result = re.match(r'%s.*Build(.*)-Media(.*).iso' % base, binary)
|
2017-02-02 17:41:24 +01:00
|
|
|
if result:
|
|
|
|
return result.group(1)
|
2017-10-10 08:26:21 +02:00
|
|
|
raise NotFoundException("can't find %s iso version" % project)
|
2015-05-03 16:08:22 +02:00
|
|
|
|
|
|
|
    def release_version(self):
        """Return the version of the release package in the standard repo.

        Parsed from the src.rpm filename of '_product:<base>-release'.

        :raises NotFoundException: when no src.rpm is found
        """
        url = self.api.makeurl(['build', self.project, 'standard', self.arch(),
                                '_product:%s-release' % self.project_base])
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        for binary in root.findall('binary'):
            binary = binary.get('filename', '')
            # e.g. openSUSE-release-20180101-1.2.src.rpm -> '20180101'
            result = re.match(r'.*-([^-]*)-[^-]*.src.rpm', binary)
            if result:
                return result.group(1)

        raise NotFoundException("can't find %s version" % self.project)
|
2015-05-03 16:08:22 +02:00
|
|
|
|
2018-02-13 16:51:35 +01:00
|
|
|
    def current_qa_version(self):
        # Snapshot version openQA is currently testing, as recorded on the
        # staging dashboard under 'version_totest'.
        return self.api.dashboard_content_load('version_totest')
|
|
|
|
|
2014-09-12 11:42:42 +02:00
|
|
|
    def find_openqa_results(self, snapshot):
        """Return the openqa jobs of a given snapshot and filter out the
        cloned jobs

        """

        url = makeurl(self.openqa_server,
                      ['api', 'v1', 'jobs'], {'group': self.openqa_group(), 'build': snapshot, 'latest': 1})
        f = self.api.retried_GET(url)
        jobs = []
        for job in json.load(f)['jobs']:
            # skip superseded jobs - only the latest clone is relevant
            if job['clone_id'] or job['result'] == 'obsoleted':
                continue
            # strip the snapshot from the name so jobs compare across builds
            job['name'] = job['name'].replace(snapshot, '')
            jobs.append(job)
        return jobs
|
|
|
|
|
|
|
|
def _result2str(self, result):
|
|
|
|
if result == QA_INPROGRESS:
|
|
|
|
return 'inprogress'
|
|
|
|
elif result == QA_FAILED:
|
|
|
|
return 'failed'
|
|
|
|
else:
|
|
|
|
return 'passed'
|
|
|
|
|
2015-03-12 13:05:43 +01:00
|
|
|
def find_failed_module(self, testmodules):
|
|
|
|
# print json.dumps(testmodules, sort_keys=True, indent=4)
|
|
|
|
for module in testmodules:
|
|
|
|
if module['result'] != 'failed':
|
2014-09-12 11:42:42 +02:00
|
|
|
continue
|
|
|
|
flags = module['flags']
|
|
|
|
if 'fatal' in flags or 'important' in flags:
|
|
|
|
return module['name']
|
|
|
|
break
|
2017-03-29 14:24:11 +02:00
|
|
|
logger.info('%s %s %s' %
|
|
|
|
(module['name'], module['result'], module['flags']))
|
2014-09-12 11:42:42 +02:00
|
|
|
|
2017-11-24 06:43:18 +01:00
|
|
|
    def update_openqa_status_message(self):
        """Create or update the pinned status comment in the openQA job group.

        The comment lists the currently ignored issues plus a machine-readable
        'tag:<snapshot>:<state>:<state>' status line. Requires
        self.status_for_openqa to be populated by the caller (see totest()).
        """
        url = makeurl(self.openqa_server,
                      ['api', 'v1', 'job_groups'])
        f = self.api.retried_GET(url)
        job_groups = json.load(f)
        # resolve our group name to its numeric id
        group_id = 0
        for jg in job_groups:
            if jg['name'] == self.openqa_group():
                group_id = jg['id']
                break

        if not group_id:
            logger.debug('No openQA group id found for status comment update, ignoring')
            return

        pinned_ignored_issue = 0
        issues = ' , '.join(self.issues_to_ignore)
        # most advanced state wins: publishing > preparing > testing > building
        status_flag = 'publishing' if self.status_for_openqa['is_publishing'] else \
            'preparing' if self.status_for_openqa['can_release'] else \
            'testing' if self.status_for_openqa['snapshotable'] else \
            'building'
        status_msg = "tag:{}:{}:{}".format(self.status_for_openqa['new_snapshot'], status_flag, status_flag)
        msg = "pinned-description: Ignored issues\r\n\r\n{}\r\n\r\n{}".format(issues, status_msg)
        data = {'text': msg}

        # find our previously pinned comment, if any, so we update instead
        # of adding a duplicate
        url = makeurl(self.openqa_server,
                      ['api', 'v1', 'groups', str(group_id), 'comments'])
        f = self.api.retried_GET(url)
        comments = json.load(f)
        for comment in comments:
            if comment['userName'] == 'ttm' and \
               comment['text'].startswith('pinned-description: Ignored issues'):
                pinned_ignored_issue = comment['id']

        logger.debug('Writing openQA status message: {}'.format(data))
        if not self.dryrun:
            if pinned_ignored_issue:
                self.openqa.openqa_request(
                    'PUT', 'groups/%s/comments/%d' % (group_id, pinned_ignored_issue), data=data)
            else:
                self.openqa.openqa_request(
                    'POST', 'groups/%s/comments' % group_id, data=data)
|
2017-04-19 17:42:36 +08:00
|
|
|
|
2014-09-12 11:42:42 +02:00
|
|
|
    def overall_result(self, snapshot):
        """Analyze the openQA jobs of a given snapshot Returns a QAResult"""

        if snapshot is None:
            return QA_FAILED

        jobs = self.find_openqa_results(snapshot)

        if len(jobs) < self.jobs_num():  # not yet scheduled
            logger.warning('we have only %s jobs' % len(jobs))
            return QA_INPROGRESS

        number_of_fails = 0
        in_progress = False
        for job in jobs:
            # print json.dumps(job, sort_keys=True, indent=4)
            if job['result'] in ('failed', 'incomplete', 'skipped', 'user_cancelled', 'obsoleted', 'parallel_failed'):
                jobname = job['name']
                # print json.dumps(job, sort_keys=True, indent=4), jobname
                # inspect the job's comments for bug references and for
                # reviewer commands ('@ttm ignore', 'label:unknown_failure')
                url = makeurl(self.openqa_server,
                              ['api', 'v1', 'jobs', str(job['id']), 'comments'])
                f = self.api.retried_GET(url)
                comments = json.load(f)
                refs = set()
                labeled = 0
                to_ignore = False
                for comment in comments:
                    for ref in comment['bugrefs']:
                        refs.add(str(ref))
                    if comment['userName'] == 'ttm' and comment['text'] == 'label:unknown_failure':
                        labeled = comment['id']
                    if re.search(r'@ttm:? ignore', comment['text']):
                        to_ignore = True
                # a failure counts as ignored only if every bug reference on
                # the job is in the ignore list (or gets added via '@ttm ignore')
                ignored = len(refs) > 0
                for ref in refs:
                    if ref not in self.issues_to_ignore:
                        if to_ignore:
                            # reviewer asked to ignore: remember persistently
                            self.issues_to_ignore.append(ref)
                            self.update_pinned_descr = True
                            with open(self.issuefile, 'a') as f:
                                f.write("%s\n" % ref)
                        else:
                            ignored = False

                if not ignored:
                    number_of_fails += 1
                    if not labeled and len(refs) > 0 and not self.dryrun:
                        data = {'text': 'label:unknown_failure'}
                        self.openqa.openqa_request(
                            'POST', 'jobs/%s/comments' % job['id'], data=data)
                elif labeled:
                    # remove flag - unfortunately can't delete comment unless admin
                    # NOTE(review): this PUT is not guarded by self.dryrun,
                    # unlike the POST above - confirm whether dryrun should
                    # also skip this write.
                    data = {'text': 'Ignored issue'}
                    self.openqa.openqa_request(
                        'PUT', 'jobs/%s/comments/%d' % (job['id'], labeled), data=data)

                if ignored:
                    logger.info("job %s failed, but was ignored", jobname)
                else:
                    joburl = '%s/tests/%s' % (self.openqa_server, job['id'])
                    logger.info("job %s failed, see %s", jobname, joburl)

            elif job['result'] == 'passed' or job['result'] == 'softfailed':
                continue
            elif job['result'] == 'none':
                # 'none' means still running unless the job was cancelled
                if job['state'] != 'cancelled':
                    in_progress = True
            else:
                raise Exception(job['result'])

        if number_of_fails > 0:
            return QA_FAILED

        if in_progress:
            return QA_INPROGRESS

        return QA_PASSED
|
|
|
|
|
|
|
|
    def all_repos_done(self, project, codes=None):
        """Check the build result of the project and only return True if all
        repos of that project are either published or unpublished

        """

        # coolo's experience says that 'finished' won't be
        # sufficient here, so don't try to add it :-)
        codes = ['published', 'unpublished'] if not codes else codes

        url = self.api.makeurl(
            ['build', project, '_result'], {'code': 'failed'})
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        ready = True
        for repo in root.findall('result'):
            # ignore ports. 'factory' is used by arm for repos that are not
            # meant to use the totest manager.
            if repo.get('repository') in ('ports', 'factory', 'images_staging'):
                continue
            # ignore 32bit for now. We're only interested in aarch64 here
            if repo.get('arch') in ('armv6l', 'armv7l'):
                continue
            if repo.get('dirty', '') == 'true':
                logger.info('%s %s %s -> %s' % (repo.get('project'),
                                                repo.get('repository'), repo.get('arch'), 'dirty'))
                ready = False
            if repo.get('code') not in codes:
                logger.info('%s %s %s -> %s' % (repo.get('project'),
                                                repo.get('repository'), repo.get('arch'), repo.get('code')))
                ready = False
        return ready
|
2014-09-12 11:42:42 +02:00
|
|
|
|
|
|
|
def maxsize_for_package(self, package):
|
|
|
|
if re.match(r'.*-mini-.*', package):
|
|
|
|
return 737280000 # a CD needs to match
|
|
|
|
|
|
|
|
if re.match(r'.*-dvd5-.*', package):
|
|
|
|
return 4700372992 # a DVD needs to match
|
|
|
|
|
2017-08-28 13:47:10 +02:00
|
|
|
if re.match(r'livecd-x11', package):
|
2014-09-12 11:42:42 +02:00
|
|
|
return 681574400 # not a full CD
|
|
|
|
|
2017-08-28 13:47:10 +02:00
|
|
|
if re.match(r'livecd-.*', package):
|
2014-09-12 11:42:42 +02:00
|
|
|
return 999999999 # a GB stick
|
|
|
|
|
2017-11-14 23:42:24 +01:00
|
|
|
if re.match(r'.*-(dvd9-dvd|cd-DVD)-.*', package):
|
2014-10-14 14:44:27 +02:00
|
|
|
return 8539996159
|
|
|
|
|
2017-11-14 23:42:24 +01:00
|
|
|
if re.match(r'.*-ftp-(ftp|POOL)-', package):
|
2014-09-12 11:42:42 +02:00
|
|
|
return None
|
|
|
|
|
2017-11-14 23:42:24 +01:00
|
|
|
if ':%s-Addon-NonOss-ftp-ftp' % self.base in package:
|
2014-09-12 11:42:42 +02:00
|
|
|
return None
|
|
|
|
|
|
|
|
raise Exception('No maxsize for {}'.format(package))
|
|
|
|
|
|
|
|
    def package_ok(self, project, package, repository, arch):
        """Checks one package in a project and returns True if it's succeeded

        """

        query = {'package': package, 'repository': repository, 'arch': arch}

        url = self.api.makeurl(['build', project, '_result'], query)
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        # first gate: the build itself must have succeeded
        for repo in root.findall('result'):
            status = repo.find('status')
            if status.get('code') != 'succeeded':
                logger.info(
                    '%s %s %s %s -> %s' % (project, package, repository, arch, status.get('code')))
                return False

        # second gate: produced isos must fit on their target medium
        maxsize = self.maxsize_for_package(package)
        if not maxsize:
            return True

        url = self.api.makeurl(['build', project, repository, arch, package])
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        for binary in root.findall('binary'):
            if not binary.get('filename', '').endswith('.iso'):
                continue
            isosize = int(binary.get('size', 0))
            if isosize > maxsize:
                logger.error('%s %s %s %s: %s' % (
                    project, package, repository, arch, 'too large by %s bytes' % (isosize - maxsize)))
                return False

        return True
|
|
|
|
|
2017-02-02 17:41:24 +01:00
|
|
|
    def is_snapshottable(self):
        """Check various conditions required for factory to be snapshotable

        """

        # all repos of the main project must be settled
        if not self.all_repos_done(self.project):
            return False

        # every product package (ftp trees and media) must have built
        # successfully and fit its medium.
        # self.ftp_products / self.main_products are defined by subclasses.
        for product in self.ftp_products + self.main_products:
            if not self.package_ok(self.project, product, self.product_repo, self.product_arch):
                return False

        # same checks for the Live subproject, when this flavor has live CDs
        if len(self.livecd_products):

            if not self.all_repos_done('%s:Live' % self.project):
                return False

            for arch in self.livecd_archs:
                for product in self.livecd_products:
                    if not self.package_ok('%s:Live' % self.project, product, self.livecd_repo, arch):
                        return False

        return True
|
|
|
|
|
2017-10-10 08:26:21 +02:00
|
|
|
    def _release_package(self, project, package, set_release=None):
        """Trigger an OBS 'release' of *package* from *project*.

        :param set_release: optional release suffix (e.g. 'Snapshot20180101')
                            passed to OBS as 'setrelease'
        """
        query = {'cmd': 'release'}

        if set_release:
            query['setrelease'] = set_release

        # FIXME: make configurable. openSUSE:Factory:ARM currently has multiple
        # repos with release targets, so obs needs to know which one to release
        if project == 'openSUSE:Factory:ARM':
            query['repository'] = 'images'

        baseurl = ['source', project, package]

        url = self.api.makeurl(baseurl, query=query)
        if self.dryrun:
            # dry run: log what would have been released
            logger.info("release %s/%s (%s)" % (project, package, set_release))
        else:
            self.api.retried_POST(url)
|
2014-09-12 11:42:42 +02:00
|
|
|
|
2017-10-10 08:26:21 +02:00
|
|
|
    def _release(self, set_release=None):
        """Release all product packages (ftp trees, live CDs, media) into
        the test project; only media/live get the snapshot release suffix."""
        for product in self.ftp_products:
            self._release_package(self.project, product)

        for cd in self.livecd_products:
            self._release_package('%s:Live' %
                                  self.project, cd, set_release=set_release)

        for cd in self.main_products:
            self._release_package(self.project, cd, set_release=set_release)
|
2017-10-10 08:26:21 +02:00
|
|
|
|
2017-03-29 14:24:11 +02:00
|
|
|
    def update_totest(self, snapshot=None):
        """Push a new snapshot into the test project.

        Disables publishing first so partially-released content is never
        exposed; per-package dryrun handling happens in _release_package.
        """
        release = 'Snapshot%s' % snapshot if snapshot else None
        logger.info('Updating snapshot %s' % snapshot)
        if not self.dryrun:
            self.api.switch_flag_in_prj(self.test_project, flag='publish', state='disable')

        self._release(set_release=release)
|
2014-09-12 11:42:42 +02:00
|
|
|
|
|
|
|
    def publish_factory_totest(self):
        """Enable publishing of the test project, making the snapshot public."""
        logger.info('Publish test project content')
        if not self.dryrun:
            self.api.switch_flag_in_prj(
                self.test_project, flag='publish', state='enable')
|
2014-09-12 11:42:42 +02:00
|
|
|
|
|
|
|
    def totest_is_publishing(self):
        """Find out if the publishing flag is set in totest's _meta"""

        url = self.api.makeurl(
            ['source', self.test_project, '_meta'])
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        # NOTE(review): 'not root.find(...)' relies on ElementTree's
        # deprecated element truthiness - it is also True for a <publish/>
        # element with no children, not only when the element is missing.
        # Confirm that an empty <publish/> should indeed default to True
        # before changing this to an 'is None' check.
        if not root.find('publish'):  # default true
            return True

        for flag in root.find('publish'):
            # repository-/arch-specific flags don't decide the project default
            if flag.get('repository', None) or flag.get('arch', None):
                continue
            if flag.tag == 'enable':
                return True
        return False
|
|
|
|
|
|
|
|
    def totest(self):
        """Main state machine: decide whether to publish the tested snapshot
        and/or release a new snapshot into the test project."""
        try:
            current_snapshot = self.get_current_snapshot()
        except NotFoundException as e:
            # nothing in test project (yet)
            logger.warn(e)
            current_snapshot = None
        new_snapshot = self.current_version()
        self.update_pinned_descr = False
        # overall_result may flip update_pinned_descr when new ignores appear
        current_result = self.overall_result(current_snapshot)
        current_qa_version = self.current_qa_version()

        logger.info('current_snapshot %s: %s' %
                    (current_snapshot, self._result2str(current_result)))
        logger.debug('new_snapshot %s', new_snapshot)
        logger.debug('current_qa_version %s', current_qa_version)

        snapshotable = self.is_snapshottable()
        logger.debug("snapshotable: %s", snapshotable)
        # a new snapshot may be released once QA on the old one is no longer
        # in progress and the new build passes the snapshot checks
        can_release = ((current_snapshot is None or current_result != QA_INPROGRESS) and snapshotable)

        # not overwriting
        if new_snapshot == current_snapshot:
            logger.debug("no change in snapshot version")
            can_release = False
        elif not self.all_repos_done(self.test_project):
            logger.debug("not all repos done, can't release")
            # the repos have to be done, otherwise we better not touch them
            # with a new release
            can_release = False

        can_publish = (current_result == QA_PASSED)

        # already published
        totest_is_publishing = self.totest_is_publishing()
        if totest_is_publishing:
            logger.debug("totest already publishing")
            can_publish = False

        if self.update_pinned_descr:
            self.status_for_openqa = {
                'current_snapshot': current_snapshot,
                'new_snapshot': new_snapshot,
                'snapshotable': snapshotable,
                'can_release': can_release,
                'is_publishing': totest_is_publishing,
            }
            self.update_openqa_status_message()

        if can_publish:
            if current_qa_version == current_snapshot:
                self.publish_factory_totest()
                self.write_version_to_dashboard("snapshot", current_snapshot)
                can_release = False  # we have to wait
            else:
                # We reached a very bad status: openQA testing is 'done', but not of the same version
                # currently in test project. This can happen when 'releasing' the
                # product failed
                raise Exception("Publishing stopped: tested version (%s) does not match version in test project (%s)"
                                % (current_qa_version, current_snapshot))

        if can_release:
            self.update_totest(new_snapshot)
            self.write_version_to_dashboard("totest", new_snapshot)
|
2014-09-12 11:42:42 +02:00
|
|
|
|
2015-04-14 09:58:53 +02:00
|
|
|
def release(self):
|
|
|
|
new_snapshot = self.current_version()
|
|
|
|
self.update_totest(new_snapshot)
|
|
|
|
|
2015-11-19 16:09:44 +01:00
|
|
|
    def write_version_to_dashboard(self, target, version):
        """Record *version* under 'version_<target>' in the staging
        dashboard package (skipped on dryrun)."""
        if not self.dryrun:
            url = self.api.makeurl(
                ['source', '%s:Staging' % self.project, 'dashboard', 'version_%s' % target])
            osc.core.http_PUT(url + '?comment=Update+version', data=version)
|
2015-02-20 13:18:09 +01:00
|
|
|
|
2017-03-29 14:24:11 +02:00
|
|
|
|
2017-10-10 08:26:21 +02:00
|
|
|
class ToTestBaseNew(ToTestBase):

    """Base class for new product builder"""

    def _release(self, set_release=None):
        """Release the single '000product' container (new-style product
        builds) instead of per-product packages."""
        query = {'cmd': 'release'}

        package = '000product'
        project = self.project

        if set_release:
            query['setrelease'] = set_release

        baseurl = ['source', project, package]

        url = self.api.makeurl(baseurl, query=query)
        if self.dryrun:
            logger.info("release %s/%s (%s)" % (project, package, set_release))
        else:
            self.api.retried_POST(url)

        # XXX still legacy
        for cd in self.livecd_products:
            self._release_package('%s:Live' %
                                  self.project, cd, set_release=set_release)

    def release_version(self):
        """Version of the release package, looked up under the new-style
        '000product' container rather than '_product'."""
        url = self.api.makeurl(['build', self.project, 'standard', self.arch(),
                                '000product:%s-release' % self.project_base])
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        for binary in root.findall('binary'):
            binary = binary.get('filename', '')
            result = re.match(r'.*-([^-]*)-[^-]*.src.rpm', binary)
            if result:
                return result.group(1)

        raise NotFoundException("can't find %s release version" % self.project)

    def current_version(self):
        # new-style builds version from the first main product's iso name
        return self.iso_build_version(self.project, self.main_products[0])

    def is_snapshottable(self):
        """In addition to the base checks, require all media to carry the
        same build number."""
        ret = super(ToTestBaseNew, self).is_snapshottable()
        if ret:
            # make sure all medias have the same build number
            builds = set()
            for p in self.ftp_products:
                if 'Addon-NonOss' in p:
                    # XXX: don't care about nonoss atm.
                    continue
                builds.add(self.ftp_build_version(self.project, p))
            for p in self.main_products + self.livecd_products:
                builds.add(self.iso_build_version(self.project, p))

            ret = (len(builds) == 1)
            if ret is False:
                logger.debug("not all medias have the same build number")

        return ret

    def update_totest(self, snapshot):
        # omit snapshot, we don't want to rename on release
        super(ToTestBaseNew, self).update_totest()
|
|
|
|
|
2017-10-10 08:26:21 +02:00
|
|
|
|
2014-09-12 11:42:42 +02:00
|
|
|
class ToTestFactory(ToTestBase):

    """ToTest handling for openSUSE Tumbleweed (i586/x86_64)."""

    main_products = [
        '_product:openSUSE-dvd5-dvd-i586',
        '_product:openSUSE-dvd5-dvd-x86_64',
        '_product:openSUSE-cd-mini-i586',
        '_product:openSUSE-cd-mini-x86_64',
        '_product:openSUSE-Tumbleweed-Kubic-dvd5-dvd-x86_64',
    ]

    ftp_products = [
        '_product:openSUSE-ftp-ftp-i586_x86_64',
        '_product:openSUSE-Addon-NonOss-ftp-ftp-i586_x86_64',
    ]

    livecd_products = [
        'livecd-tumbleweed-kde',
        'livecd-tumbleweed-gnome',
        'livecd-tumbleweed-x11',
    ]

    def __init__(self, *args, **kwargs):
        super(ToTestFactory, self).__init__(*args, **kwargs)

    def openqa_group(self):
        """openQA job group holding the Tumbleweed tests."""
        return 'openSUSE Tumbleweed'

    def iso_prefix(self):
        """Product name embedded in iso filenames."""
        return 'Tumbleweed'

    def arch(self):
        """Reference architecture for version lookups."""
        return 'x86_64'
|
|
|
|
|
2014-05-27 13:26:29 +02:00
|
|
|
|
2015-03-19 09:58:46 +01:00
|
|
|
class ToTestFactoryPowerPC(ToTestBase):

    """ToTest handling for openSUSE Tumbleweed on PowerPC."""

    main_products = [
        '_product:openSUSE-dvd5-dvd-ppc64',
        '_product:openSUSE-dvd5-dvd-ppc64le',
        '_product:openSUSE-cd-mini-ppc64',
        '_product:openSUSE-cd-mini-ppc64le',
    ]

    ftp_products = ['_product:openSUSE-ftp-ftp-ppc64_ppc64le']

    livecd_products = []

    def __init__(self, *args, **kwargs):
        super(ToTestFactoryPowerPC, self).__init__(*args, **kwargs)

    def openqa_group(self):
        """openQA job group holding the PowerPC tests."""
        return 'openSUSE Tumbleweed PowerPC'

    def arch(self):
        """Reference architecture for version lookups."""
        return 'ppc64le'

    def iso_prefix(self):
        """Product name embedded in iso filenames."""
        return 'Tumbleweed'

    def jobs_num(self):
        """PowerPC runs a much smaller openQA job set."""
        return 4
|
|
|
|
|
2017-03-29 14:24:11 +02:00
|
|
|
|
2017-01-20 11:46:42 +01:00
|
|
|
class ToTestFactoryzSystems(ToTestBase):
    """ToTest handling for the Factory zSystems (s390x) port."""

    main_products = [
        '000product:openSUSE-dvd5-dvd-s390x',
        '000product:openSUSE-cd-mini-s390x',
    ]

    ftp_products = ['000product:openSUSE-ftp-ftp-s390x']

    # No live CDs are produced for s390x.
    livecd_products = []

    def __init__(self, *args, **kwargs):
        super(ToTestFactoryzSystems, self).__init__(*args, **kwargs)

    def openqa_group(self):
        """openQA job group monitored for this port."""
        return 'openSUSE Tumbleweed s390x'

    def arch(self):
        """Architecture string used when querying openQA."""
        return 's390x'

    def iso_prefix(self):
        """Leading part of the ISO file names."""
        return 'Tumbleweed'

    def jobs_num(self):
        # NOTE(review): semantics defined by ToTestBase — confirm.
        return 1
2015-04-08 15:24:36 +02:00
|
|
|
class ToTestFactoryARM(ToTestFactory):
    """ToTest handling for the Factory AArch64 port.

    Inherits iso_prefix() and the rest of the behaviour from
    ToTestFactory.
    """

    main_products = [
        '_product:openSUSE-cd-mini-aarch64',
        '_product:openSUSE-dvd5-dvd-aarch64',
    ]

    ftp_products = ['_product:openSUSE-ftp-ftp-aarch64']

    # No live CDs are produced for AArch64.
    livecd_products = []

    def __init__(self, *args, **kwargs):
        super(ToTestFactoryARM, self).__init__(*args, **kwargs)

    def openqa_group(self):
        """openQA job group monitored for this port."""
        return 'openSUSE Tumbleweed AArch64'

    def arch(self):
        """Architecture string used when querying openQA."""
        return 'aarch64'

    def jobs_num(self):
        # NOTE(review): semantics defined by ToTestBase — confirm.
        return 2
2017-10-10 08:26:21 +02:00
|
|
|
class ToTest150(ToTestBaseNew):
    """ToTest handling for openSUSE Leap 15.0 (x86_64 media)."""

    main_products = [
        '000product:openSUSE-cd-mini-x86_64',
        '000product:openSUSE-dvd5-dvd-x86_64',
    ]

    ftp_products = [
        '000product:openSUSE-ftp-ftp-x86_64',
        '000product:openSUSE-Addon-NonOss-ftp-ftp-x86_64',
    ]

    livecd_products = []

    def openqa_group(self):
        """openQA job group monitored for Leap 15.0."""
        return 'openSUSE Leap 15.0'

    def get_current_snapshot(self):
        # The build number of the first main product in the ':ToTest'
        # subproject serves as the snapshot identifier.
        return self.iso_build_version(self.project + ':ToTest', self.main_products[0])
2017-12-17 10:00:24 +01:00
|
|
|
class ToTest150Ports(ToTestBaseNew):
    """ToTest handling for the Leap 15.0 Ports (AArch64) media."""

    main_products = [
        '000product:openSUSE-cd-mini-aarch64',
        '000product:openSUSE-dvd5-dvd-aarch64',
    ]

    ftp_products = [
        '000product:openSUSE-ftp-ftp-aarch64',
    ]

    livecd_products = []

    def openqa_group(self):
        """openQA job group monitored for Leap 15.0 Ports."""
        return 'openSUSE Leap 15.0 Ports'

    def get_current_snapshot(self):
        # Same scheme as ToTest150: build number of the first main
        # product in the ':ToTest' subproject.
        return self.iso_build_version(self.project + ':ToTest', self.main_products[0])
2018-02-13 16:51:35 +01:00
|
|
|
class ToTest150Images(ToTestBaseNew):
    """ToTest handling for the Leap 15.0 live images."""

    main_products = [
        'livecd-leap-gnome',
        'livecd-leap-kde',
        'livecd-leap-x11',
    ]

    ftp_products = []

    livecd_products = []

    # Architecture attached to the image products.
    product_arch = 'x86_64'

    def openqa_group(self):
        """openQA job group monitored for the Leap images."""
        return 'openSUSE Leap 15.0 Images'

    def current_qa_version(self):
        # Image versions live in their own dashboard file, separate from
        # the distribution's 'version_totest'.
        return self.api.dashboard_content_load('version_totest_images')

    def write_version_to_dashboard(self, target, version):
        # Suffix the dashboard key with '_images' so image versions do
        # not collide with the distribution entries.
        super(ToTest150Images, self).write_version_to_dashboard('{}_images'.format(target), version)

    def get_current_snapshot(self):
        # Build number of the first image in the ':ToTest' subproject.
        return self.iso_build_version(self.project + ':ToTest', self.main_products[0])

    def _release(self, set_release=None):
        # NOTE(review): this calls ToTestBase._release directly, skipping
        # ToTestBaseNew._release — looks intentional for images, confirm.
        ToTestBase._release(self, set_release)

    def jobs_num(self):
        # NOTE(review): semantics defined by ToTestBase — confirm.
        return 13
2017-11-14 23:42:24 +01:00
|
|
|
class ToTestSLE150(ToTestBaseNew):
    """ToTest handling for SUSE SLE 15 GA."""

    main_products = [
        '000product:SLES-cd-DVD-aarch64',
        '000product:SLES-cd-DVD-ppc64le',
        '000product:SLES-cd-DVD-s390x',
        '000product:SLES-cd-DVD-x86_64',
    ]

    ftp_products = [
        '000product:SLES-ftp-POOL-aarch64',
        '000product:SLES-ftp-POOL-ppc64le',
        '000product:SLES-ftp-POOL-s390x',
        '000product:SLES-ftp-POOL-x86_64',
    ]

    livecd_products = []

    def __init__(self, *args, **kwargs):
        # SLE publishes into a ':TEST' subproject instead of ':ToTest'.
        super(ToTestSLE150, self).__init__(test_subproject='TEST', *args, **kwargs)

    def openqa_group(self):
        """openQA job group monitored for SLE 15."""
        return 'Functional'

    def get_current_snapshot(self):
        # Build number of the first main product in the ':TEST'
        # subproject serves as the snapshot identifier.
        return self.iso_build_version(self.project + ':TEST', self.main_products[0])

    def ftp_build_version(self, project, tree):
        # SLE build numbers follow the 'SLE' naming scheme.
        return super(ToTestSLE150, self).ftp_build_version(project, tree, base='SLE')

    def iso_build_version(self, project, tree):
        # SLE build numbers follow the 'SLE' naming scheme.
        return super(ToTestSLE150, self).iso_build_version(project, tree, base='SLE')
2015-04-14 13:39:48 +02:00
|
|
|
class CommandlineInterface(cmdln.Cmdln):
    """Command line frontend for the ToTest Manager.

    Maps OBS/IBS project names to their ToTest* handler classes and
    selects default openQA/OBS server URLs per distribution family.
    """

    def __init__(self, *args, **kwargs):
        # Fixed: the arguments were previously passed as two positional
        # values (the tuple and the dict themselves) instead of being
        # unpacked with */**.
        cmdln.Cmdln.__init__(self, *args, **kwargs)

        # Project name -> handler class implementing its release logic.
        self.totest_class = {
            'openSUSE:Factory': ToTestFactory,
            'openSUSE:Factory:PowerPC': ToTestFactoryPowerPC,
            'openSUSE:Factory:ARM': ToTestFactoryARM,
            'openSUSE:Factory:zSystems': ToTestFactoryzSystems,
            'openSUSE:Leap:15.0': ToTest150,
            'openSUSE:Leap:15.0:Ports': ToTest150Ports,
            'openSUSE:Leap:15.0:Images': ToTest150Images,
            'SUSE:SLE-15:GA': ToTestSLE150,
        }
        # Default servers, keyed by '--project-base'.
        self.openqa_server = {
            'openSUSE': 'https://openqa.opensuse.org',
            'SLE': 'https://openqa.suse.de',
        }
        self.api_url = {
            'openSUSE': 'https://api.opensuse.org',
            'SLE': 'https://api.suse.de',
        }

    def get_optparser(self):
        """Build the option parser shared by all subcommands."""
        parser = cmdln.CmdlnOptionParser(self)
        parser.add_option("--dry", action="store_true", help="dry run")
        parser.add_option("--debug", action="store_true", help="debug output")
        parser.add_option("--verbose", action="store_true", help="verbose")
        parser.add_option(
            "--osc-debug", action="store_true", help="osc debug output")
        parser.add_option(
            "--project-base", help="""Select base of OBS/IBS project as well as openQA server based on distribution family, e.g. 'openSUSE' or 'SLE', default:
            'openSUSE'""")
        parser.add_option(
            "--openqa-server", help="""Full URL to the openQA server that should be queried, default based on '--project-base' selection, e.g.
            'https://openqa.opensuse.org' for 'openSUSE'""")
        parser.add_option(
            "--obs-api-url", help="""Full URL to OBS instance to be queried, default based on '--project-base' selection, e.g.
            'https://api.opensuse.org' for 'openSUSE'""")
        # Fixed: an unreachable duplicate 'return parser' was removed.
        return parser

    def postoptparse(self):
        """Configure logging, osc and option defaults after parsing."""
        level = None
        if self.options.debug:
            level = logging.DEBUG
        elif self.options.verbose:
            level = logging.INFO

        fmt = '%(module)s:%(lineno)d %(levelname)s %(message)s'
        if os.isatty(0):
            # Interactive run: prefix log messages with a timestamp.
            fmt = '%(asctime)s - ' + fmt

        logging.basicConfig(level=level, format=fmt)

        osc.conf.get_config()
        if self.options.osc_debug:
            osc.conf.config['debug'] = True

        # Fill in defaults derived from the selected project base.
        if not self.options.project_base:
            self.options.project_base = 'openSUSE'
        if not self.options.openqa_server:
            self.options.openqa_server = self.openqa_server[self.options.project_base]
        if not self.options.obs_api_url:
            self.options.obs_api_url = self.api_url[self.options.project_base]

    def _setup_totest(self, project):
        """Return the ToTest* handler instance for *project*.

        Short names such as 'Factory' are accepted via an
        'openSUSE:'-prefixed fallback. Raises CmdlnUserError for
        unknown projects.
        """
        fallback_project = 'openSUSE:%s' % project
        if project not in self.totest_class and fallback_project in self.totest_class:
            project = fallback_project
        Config(project)
        if project not in self.totest_class:
            msg = 'Project %s not recognized. Possible values [%s]' % (
                project, ', '.join(self.totest_class))
            raise cmdln.CmdlnUserError(msg)

        return self.totest_class[project](project, self.options.dry, self.options.obs_api_url, self.options.openqa_server)

    @cmdln.option('-n', '--interval', metavar="minutes", type="int", help="periodic interval in minutes")
    def do_run(self, subcmd, opts, project='openSUSE:Factory'):
        """${cmd_name}: run the ToTest Manager

        ${cmd_usage}
        ${cmd_option_list}
        """

        class ExTimeout(Exception):

            """raised on timeout"""

        if opts.interval:
            def alarm_called(nr, frame):
                raise ExTimeout()
            signal.signal(signal.SIGALRM, alarm_called)

        while True:
            try:
                totest = self._setup_totest(project)
                totest.totest()
            except Exception as e:
                # Best effort: log the failure and retry on the next
                # iteration instead of aborting the daemon loop.
                logger.error(e)

            if opts.interval:
                if os.isatty(0):
                    # Interactive: sleep via SIGALRM so pressing enter
                    # triggers an immediate re-check.
                    logger.info(
                        "sleeping %d minutes. Press enter to check now ..." % opts.interval)
                    signal.alarm(opts.interval * 60)
                    try:
                        raw_input()
                    except ExTimeout:
                        pass
                    signal.alarm(0)
                    logger.info("recheck at %s" %
                                datetime.datetime.now().isoformat())
                else:
                    logger.info("sleeping %d minutes." % opts.interval)
                    time.sleep(opts.interval * 60)
                continue
            break

    def do_release(self, subcmd, opts, project='openSUSE:Factory'):
        """${cmd_name}: manually release all media. Use with caution!

        ${cmd_usage}
        ${cmd_option_list}
        """

        totest = self._setup_totest(project)

        totest.release()
|
|
# Script entry point: dispatch to the cmdln-based CLI and propagate
# its exit status.
if __name__ == "__main__":
    sys.exit(CommandlineInterface().main())