#!/usr/bin/python

from __future__ import print_function

import cmdln
from collections import namedtuple
import hashlib
from lxml import etree as ET
import os
import pipes
import re
import subprocess
import sys
import tempfile
import osc.core
import argparse
import logging

from osclib.cache_manager import CacheManager
from osc import conf
from osclib.conf import Config
from osclib.conf import str2bool
from osclib.core import BINARY_REGEX
from osclib.core import builddepinfo
from osclib.core import depends_on
from osclib.core import devel_project_fallback
from osclib.core import fileinfo_ext_all
from osclib.core import package_binary_list
from osclib.core import project_meta_revision
from osclib.core import project_pseudometa_file_ensure
from osclib.core import project_pseudometa_file_load
from osclib.core import project_pseudometa_package
from osclib.core import repository_path_search
from osclib.core import repository_path_expand
from osclib.core import repositories_states
from osclib.core import repository_arch_state
from osclib.core import repositories_published
from osclib.core import target_archs
from osclib.comments import CommentAPI
from osclib.memoize import memoize
from osclib.util import sha1_short
from osclib.stagingapi import StagingAPI

try:
    from urllib.error import HTTPError
except ImportError:
    # python 2.x
    from urllib2 import HTTPError

import ReviewBot

CACHEDIR = CacheManager.directory('repository-meta')
SCRIPT_PATH = os.path.dirname(os.path.realpath(__file__))
CheckResult = namedtuple('CheckResult', ('success', 'comment'))
INSTALL_REGEX = r"^(?:can't install (.*?)|found conflict of (.*?) with (.*?)):$"
InstallSection = namedtuple('InstallSection', ('binaries', 'text'))

ERROR_REPO_SPECIFIED = 'a repository must be specified via OSRT:Config main-repo for {}'
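
# A quick illustration of what INSTALL_REGEX is meant to match; the two sample
# header lines below are hypothetical repo_checker.pl output, not captured logs:
#   can't install foo-1.0-1.1.x86_64:
#   found conflict of foo-1.0-1.1.x86_64 with bar-2.0-3.1.x86_64:
# install_check_parse() starts a new InstallSection at each such header and
# attaches the indented detail lines that follow.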


class InstallChecker(object):
    def __init__(self, api, config):
        self.api = api
        self.config = conf.config[api.project]
        self.logger = logging.getLogger('InstallChecker')
        self.commentapi = CommentAPI(api.apiurl)
        # Results grouped by package when install_check() runs with parse=True;
        # initialized here so install_check_sections_group() always has
        # somewhere to write.
        self.package_results = {}

        self.arch_whitelist = self.config.get('repo_checker-arch-whitelist')
        if self.arch_whitelist:
            self.arch_whitelist = set(self.arch_whitelist.split(' '))

        self.ring_whitelist = set(self.config.get('repo_checker-binary-whitelist-ring', '').split(' '))

        self.cycle_packages = self.config.get('repo_checker-allowed-in-cycles')
        self.calculate_allowed_cycles()

        self.existing_problems = self.binary_list_existing_problem(api.project, api.cmain_repo)

    def check_required_by(self, fileinfo, provides, requiredby, built_binaries, comments):
        if requiredby.get('name') in built_binaries:
            return True
        # extract >= and the like
        provide = provides.get('dep')
        provide = provide.split(' ')[0]
        comments.append('{} provides {} required by {}'.format(fileinfo.find('name').text, provide, requiredby.get('name')))
        url = self.api.makeurl(['build', self.api.project, self.api.cmain_repo, 'x86_64', '_repository', requiredby.get('name') + '.rpm'],
                               {'view': 'fileinfo_ext'})
        reverse_fileinfo = ET.parse(osc.core.http_GET(url)).getroot()
        for require in reverse_fileinfo.findall('requires_ext'):
            # extract >= and the like here too
            dep = require.get('dep').split(' ')[0]
            if dep != provide:
                continue
            for provided_by in require.findall('providedby'):
                if provided_by.get('name') in built_binaries:
                    continue
                comments.append(' also provided by {} -> ignoring'.format(provided_by.get('name')))
                return True
        comments.append('Error: missing alternative provides for {}'.format(provide))
        return False

    def check_delete_request(self, req, to_ignore, comments):
        package = req['package']
        if package in to_ignore:
            self.logger.info('Delete request for package {} ignored'.format(package))
            return True

        built_binaries = set()
        file_infos = []
        for fileinfo in fileinfo_ext_all(self.api.apiurl, self.api.project, self.api.cmain_repo, 'x86_64', package):
            built_binaries.add(fileinfo.find('name').text)
            file_infos.append(fileinfo)

        result = True
        for fileinfo in file_infos:
            for provides in fileinfo.findall('provides_ext'):
                for requiredby in provides.findall('requiredby[@name]'):
                    result = self.check_required_by(fileinfo, provides, requiredby, built_binaries, comments) and result

        what_depends_on = depends_on(self.api.apiurl, self.api.project, self.api.cmain_repo, [package], True)

        # filter out dependency on the package itself (happens with e.g.
        # java bootstrapping itself with a previous build)
        if package in what_depends_on:
            what_depends_on.remove(package)

        if what_depends_on:
            comments.append('{} is still a build requirement of:\n\n- {}'.format(
                package, '\n- '.join(sorted(what_depends_on))))
            return False

        return result

    def packages_to_ignore(self, project):
        comments = self.commentapi.get_comments(project_name=project)
        ignore_re = re.compile(r'^installcheck: ignore (?P<args>.*)$', re.MULTILINE)

        # the last one wins; for now we don't care who said it
        args = []
        for comment in comments.values():
            match = ignore_re.search(comment['comment'].replace('\r', ''))
            if not match:
                continue
            args = match.group('args').strip()
            # allow space and comma to separate
            args = args.replace(',', ' ').split(' ')
        return args
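
    # Illustrative usage: a staging project comment containing the line
    #   installcheck: ignore libfoo1 libbar-devel
    # (package names hypothetical) makes this return ['libfoo1', 'libbar-devel'];
    # commas are accepted as separators as well.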

    def staging(self, project, force=False):
        api = self.api

        repository = self.api.cmain_repo

        # fetch the build ids at the beginning - mirroring takes a while
        buildids = {}
        try:
            architectures = self.target_archs(project, repository)
        except HTTPError as e:
            if e.code == 404:
                # adi projects disappear all the time, so don't worry
                return False
            raise

        all_done = True
        for arch in architectures:
            pra = '{}/{}/{}'.format(project, repository, arch)
            buildid = self.buildid(project, repository, arch)
            if not buildid:
                self.logger.error('No build ID in {}'.format(pra))
                return False
            buildids[arch] = buildid
            url = self.report_url(project, repository, arch, buildid)
            try:
                root = ET.parse(osc.core.http_GET(url)).getroot()
                check = root.find('check[@name="installcheck"]/state')
                if check is not None and check.text != 'pending':
                    self.logger.info('{} already "{}", ignoring'.format(pra, check.text))
                else:
                    all_done = False
            except HTTPError:
                self.logger.info('{} has no status report'.format(pra))
                all_done = False

        if all_done and not force:
            return True

        repository_pairs = repository_path_expand(api.apiurl, project, repository)

        result = True

        status = api.project_status(project)
        if not status:
            self.logger.error('no project status for {}'.format(project))
            return False

        result_comment = []

        to_ignore = self.packages_to_ignore(project)
        meta = api.load_prj_pseudometa(status['description'])
        for req in meta['requests']:
            if req['type'] == 'delete':
                result = self.check_delete_request(req, to_ignore, result_comment) and result

        for arch in architectures:
            # hit the first repository in the target project (if existent)
            target_pair = None
            directories = []
            for pair_project, pair_repository in repository_pairs:
                # ignore repositories only inherited for config
                if repository_arch_state(self.api.apiurl, pair_project, pair_repository, arch):
                    if not target_pair and pair_project == api.project:
                        target_pair = [pair_project, pair_repository]

                    directories.append(self.mirror(pair_project, pair_repository, arch))

            if not api.is_adi_project(project):
                # For "leaky" ring packages in letter stagings, whose repository
                # setup does not include the target project and which are not
                # intended to have all run-time dependencies satisfied.
                whitelist = self.ring_whitelist
            else:
                whitelist = self.existing_problems

            whitelist |= set(to_ignore)

            check = self.cycle_check(project, repository, arch)
            if not check.success:
                self.logger.warning('Cycle check failed')
                result_comment.append(check.comment)
                result = False

            check = self.install_check(target_pair, arch, directories, None, whitelist)
            if not check.success:
                self.logger.warning('Install check failed')
                result_comment.append(check.comment)
                result = False

        if result:
            self.report_state('success', self.gocd_url(), project, repository, buildids)
        else:
            result_comment.insert(0, 'Generated from {}\n'.format(self.gocd_url()))
            self.report_state('failure', self.upload_failure(project, result_comment), project, repository, buildids)
            self.logger.warning('Not accepting {}'.format(project))
            return False

        return result

    def upload_failure(self, project, comment):
        print(project, '\n'.join(comment))
        url = self.api.makeurl(['source', 'home:repo-checker', 'reports', project])
        osc.core.http_PUT(url, data='\n'.join(comment))

        url = self.api.apiurl.replace('api.', 'build.')
        return '{}/package/view_file/home:repo-checker/reports/{}'.format(url, project)
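
    # Illustrative result, assuming the default openSUSE instance: with apiurl
    # https://api.opensuse.org the returned link looks like
    # https://build.opensuse.org/package/view_file/home:repo-checker/reports/<project>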

    def report_state(self, state, report_url, project, repository, buildids):
        architectures = self.target_archs(project, repository)
        for arch in architectures:
            self.report_pipeline(state, report_url, project, repository, arch, buildids[arch], arch == architectures[-1])

    def gocd_url(self):
        if not os.environ.get('GO_SERVER_URL'):
            # placeholder :)
            return 'http://stephan.kulow.org/'
        report_url = os.environ.get('GO_SERVER_URL').replace(':8154', '')
        return report_url + '/tab/build/detail/{}/{}/{}/{}/{}#tab-console'.format(os.environ.get('GO_PIPELINE_NAME'),
                                                                                  os.environ.get('GO_PIPELINE_COUNTER'),
                                                                                  os.environ.get('GO_STAGE_NAME'),
                                                                                  os.environ.get('GO_STAGE_COUNTER'),
                                                                                  os.environ.get('GO_JOB_NAME'))
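
    # Illustrative result, assuming a GoCD server at https://gocd.example.com:8154
    # (hypothetical): the returned link has the shape
    # https://gocd.example.com/tab/build/detail/<pipeline>/<pipeline-counter>/<stage>/<stage-counter>/<job>#tab-console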

    def buildid(self, project, repository, architecture):
        url = self.api.makeurl(['build', project, repository, architecture], {'view': 'status'})
        root = ET.parse(osc.core.http_GET(url)).getroot()
        buildid = root.find('buildid')
        if buildid is None:
            return False
        return buildid.text
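
    # Illustrative note: the ?view=status document is expected to contain a
    # <buildid> element at the top level, e.g. <buildid>1234abcdef</buildid>
    # (value hypothetical); False is returned when the element is missing.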

    def report_url(self, project, repository, architecture, buildid):
        return self.api.makeurl(['status_reports', 'built', project,
                                 repository, architecture, 'reports', buildid])
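
    # Illustrative example (project and build id hypothetical): the URL path is
    # /status_reports/built/openSUSE:Factory:Staging:A/standard/x86_64/reports/1234abcdef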

    def report_pipeline(self, state, report_url, project, repository, architecture, buildid, is_last):
        url = self.report_url(project, repository, architecture, buildid)
        name = 'installcheck'
        # This is a little ugly, but we don't need two failures. So save a
        # success for the other archs just to mark them as visited - 'pending'
        # we put in for both.
        if not is_last:
            if state == 'failure':
                state = 'success'

        xml = self.check_xml(report_url, state, name)
        try:
            osc.core.http_POST(url, data=xml)
        except HTTPError:
            print('failed to post status to ' + url)
            sys.exit(1)

    def check_xml(self, url, state, name):
        check = ET.Element('check')
        if url:
            se = ET.SubElement(check, 'url')
            se.text = url
        se = ET.SubElement(check, 'state')
        se.text = state
        se = ET.SubElement(check, 'name')
        se.text = name
        return ET.tostring(check)
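
    # Illustrative example (URL hypothetical):
    #   check_xml('https://ci.example.com/log', 'success', 'installcheck')
    # serializes to
    #   <check><url>https://ci.example.com/log</url><state>success</state><name>installcheck</name></check>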

    def target_archs(self, project, repository):
        archs = target_archs(self.api.apiurl, project, repository)

        # Check for arch whitelist and use intersection.
        if self.arch_whitelist:
            archs = list(self.arch_whitelist.intersection(set(archs)))

        # Trick to prioritize x86_64.
        return sorted(archs, reverse=True)
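
    # The reverse lexicographic order is what does the prioritizing, e.g.
    # (arch list illustrative):
    #   sorted(['i586', 'ppc64le', 'x86_64'], reverse=True)
    #   -> ['x86_64', 'ppc64le', 'i586']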

    @memoize(ttl=60, session=True, add_invalidate=True)
    def mirror(self, project, repository, arch):
        """Call bs_mirrorfull script to mirror packages."""
        directory = os.path.join(CACHEDIR, project, repository, arch)
        if not os.path.exists(directory):
            os.makedirs(directory)

        script = os.path.join(SCRIPT_PATH, 'bs_mirrorfull')
        path = '/'.join((project, repository, arch))
        url = '{}/public/build/{}'.format(self.api.apiurl, path)
        parts = ['LC_ALL=C', 'perl', script, '--nodebug', url, directory]
        parts = [pipes.quote(part) for part in parts]

        self.logger.info('mirroring {}'.format(path))
        if os.system(' '.join(parts)):
            raise Exception('failed to mirror {}'.format(path))

        return directory

    @memoize(session=True)
    def binary_list_existing_problem(self, project, repository):
        """Determine which binaries are mentioned in repo_checker output."""
        binaries = set()

        filename = self.project_pseudometa_file_name(project, repository)
        content = project_pseudometa_file_load(self.api.apiurl, project, filename)
        if not content:
            self.logger.warning('no project_only run from which to extract existing problems')
            return binaries

        sections = self.install_check_parse(content)
        for section in sections:
            for binary in section.binaries:
                match = re.match(BINARY_REGEX, binary)
                if match:
                    binaries.add(match.group('name'))

        return binaries

    def install_check(self, target_project_pair, arch, directories,
                      ignore=None, whitelist=(), parse=False, no_filter=False):
        self.logger.info('install check: start (ignore:{}, whitelist:{}, parse:{}, no_filter:{})'.format(
            bool(ignore), len(whitelist), parse, no_filter))

        with tempfile.NamedTemporaryFile(mode='w') as ignore_file:
            # Print ignored rpms on separate lines in ignore file.
            if ignore:
                for item in ignore:
                    ignore_file.write(item + '\n')
                ignore_file.flush()

            # Invoke repo_checker.pl to perform an install check.
            script = os.path.join(SCRIPT_PATH, 'repo_checker.pl')
            parts = ['LC_ALL=C', 'perl', script, arch, ','.join(directories),
                     '-f', ignore_file.name, '-w', ','.join(whitelist)]
            if no_filter:
                parts.append('--no-filter')

            parts = [pipes.quote(part) for part in parts]
            p = subprocess.Popen(' '.join(parts), shell=True,
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE, close_fds=True)
            stdout, stderr = p.communicate()
            # Decode once so both the parse path and the comment formatting
            # below operate on text under Python 3 as well.
            stdout = stdout.decode('utf-8')
            stderr = stderr.decode('utf-8')

        if p.returncode:
            self.logger.info('install check: failed')
            if p.returncode == 126:
                self.logger.warning('mirror cache reset due to corruption')
                self._invalidate_all()
            elif parse:
                # Parse output for later consumption for posting comments.
                sections = self.install_check_parse(stdout)
                self.install_check_sections_group(
                    target_project_pair[0], target_project_pair[1], arch, sections)

            # Format output as markdown comment.
            parts = []

            stdout = stdout.strip()
            if stdout:
                parts.append(stdout + '\n')
            stderr = stderr.strip()
            if stderr:
                parts.append(stderr + '\n')

            header = '### [install check & file conflicts for {}]'.format(arch)
            return CheckResult(False, header + '\n\n' + ('\n' + ('-' * 80) + '\n\n').join(parts))

        self.logger.info('install check: passed')
        return CheckResult(True, None)
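
    # Illustrative call (paths and names hypothetical):
    #   self.install_check(['openSUSE:Factory', 'standard'], 'x86_64',
    #                      ['/cache/prj/standard/x86_64'], whitelist={'known-bad-pkg'})
    # returns CheckResult(True, None) on success, or CheckResult(False, <markdown>)
    # with the repo_checker.pl output wrapped under a per-arch header on failure.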

    def install_check_sections_group(self, project, repository, arch, sections):
        _, binary_map = package_binary_list(self.api.apiurl, project, repository, arch)

        for section in sections:
            # If we switch to creating bugs, it likely makes sense to join the
            # packages to form a grouping key and create shared bugs for conflicts.
            # The check for b in binary_map was added after encountering:
            # https://lists.opensuse.org/opensuse-buildservice/2017-08/msg00035.html
            # Under normal circumstances this should never occur.
            packages = {binary_map[b] for b in section.binaries if b in binary_map}
            for package in packages:
                self.package_results.setdefault(package, [])
                self.package_results[package].append(section)

    def install_check_parse(self, output):
        section = None
        text = None

        # Loop over lines and parse into chunks assigned to binaries.
        for line in output.splitlines(True):
            if line.startswith(' '):
                if section:
                    text += line
            else:
                if section:
                    yield InstallSection(section, text)

                match = re.match(INSTALL_REGEX, line)
                if match:
                    # Remove empty groups since regex matches different patterns.
                    binaries = [b for b in match.groups() if b is not None]
                    section = binaries
                    text = line
                else:
                    section = None

        if section:
            yield InstallSection(section, text)
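
    # Illustrative parse (binary names hypothetical): feeding it
    #   can't install foo-1.0-1.1.x86_64:
    #     nothing provides libbar.so.1 needed by foo
    # yields one InstallSection(binaries=["foo-1.0-1.1.x86_64"], text=<both lines>);
    # indented lines accumulate into the section opened by the last header.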

    def calculate_allowed_cycles(self):
        self.allowed_cycles = []
        if self.cycle_packages:
            for comma_list in self.cycle_packages.split(';'):
                self.allowed_cycles.append(comma_list.split(','))
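
    # Illustrative config (package names hypothetical):
    #   repo_checker-allowed-in-cycles = 'gcc,gcc-testresults;python,python-base'
    # becomes [['gcc', 'gcc-testresults'], ['python', 'python-base']]; each inner
    # list is one tolerated build cycle.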

    def cycle_check(self, project, repository, arch):
        self.logger.info('cycle check: start %s/%s/%s' % (project, repository, arch))
        comment = []

        depinfo = builddepinfo(self.api.apiurl, project, repository, arch, order=False)
        for cycle in depinfo.findall('cycle'):
            for package in cycle.findall('package'):
                package = package.text
                allowed = False
                for acycle in self.allowed_cycles:
                    if package in acycle:
                        allowed = True
                        break
                if not allowed:
                    cycled = [p.text for p in cycle.findall('package')]
                    comment.append('Package {} appears in cycle {}'.format(package, '/'.join(cycled)))

        if comment:
            # New cycles, post comment.
            self.logger.info('cycle check: failed')
            return CheckResult(False, '\n'.join(comment) + '\n')

        self.logger.info('cycle check: passed')
        return CheckResult(True, None)

    def project_pseudometa_file_name(self, project, repository):
        filename = 'repo_checker'

        main_repo = Config.get(self.api.apiurl, project).get('main-repo')
        if not main_repo:
            filename += '.' + repository

        return filename
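
    # Illustrative result: 'repo_checker' when OSRT:Config main-repo is set for
    # the project, otherwise e.g. 'repo_checker.standard' for a repository named
    # 'standard' (repository name hypothetical).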


if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description='Do an installcheck on staging project')
    parser.add_argument('-s', '--staging', type=str, default=None,
                        help='staging project')
    parser.add_argument('-p', '--project', type=str, default='openSUSE:Factory',
                        help='project to check (ex. openSUSE:Factory, openSUSE:Leap:15.1)')
    parser.add_argument('-d', '--debug', action='store_true', default=False,
                        help='enable debug information')
    parser.add_argument('-A', '--apiurl', metavar='URL', help='API URL')

    args = parser.parse_args()

    osc.conf.get_config(override_apiurl=args.apiurl)
    osc.conf.config['debug'] = args.debug

    apiurl = osc.conf.config['apiurl']
    config = Config(apiurl, args.project)
    api = StagingAPI(apiurl, args.project)
    staging_report = InstallChecker(api, config)

    if args.debug:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig(level=logging.INFO)

    result = True
    if args.staging:
        result = staging_report.staging(api.prj_from_short(args.staging), force=True)
    else:
        for staging in api.get_staging_projects():
            if api.is_adi_project(staging):
                result = staging_report.staging(staging) and result

    if not result:
        sys.exit(1)
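
# Illustrative invocations (script file name and staging letter hypothetical):
#   ./staging-installcheck.py --project openSUSE:Factory                # all adi stagings
#   ./staging-installcheck.py --project openSUSE:Factory --staging A   # force one staging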