#!/usr/bin/python3

import datetime
import difflib
import hashlib
import logging
import os
import subprocess
import sys
import tempfile
from urllib.parse import urlencode

import cmdln
import yaml
from lxml import etree as ET

from osc import conf
from osc.core import http_request

import ToolBase
from osclib.conf import Config
from osclib.core import (http_GET, makeurl,
                         repository_path_expand, repository_path_search,
                         target_archs, source_file_load, source_file_ensure)
from osclib.repochecks import mirror, parsed_installcheck, CorruptRepos
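
# Typical invocation (illustrative names; the script name, store
# project/package and checked project are placeholders):
#
#   ./repo_checker.py --store openSUSE:Factory:Staging/dashboard \
#       check openSUSE:Factory
#
# `check` installchecks the project's main repository and, unless
# --no-rebuild is given, triggers rebuilds for packages whose problems
# changed since the last stored state.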


class RepoChecker(ToolBase.ToolBase):
    def __init__(self):
        ToolBase.ToolBase.__init__(self)
        self.logger = logging.getLogger('RepoChecker')
        self.store_project = None
        self.store_package = None
        self.rebuild = None

    def parse_store(self, project_package):
        if project_package:
            self.store_project, self.store_package = project_package.split('/')

    def check(self, project, repository):
        if not repository:
            repository = self.project_repository(project)
        if not repository:
            self.logger.error('a repository must be specified via OSRT:Config main-repo for {}'.format(project))
            return

        config = Config.get(self.apiurl, project)

        archs = target_archs(self.apiurl, project, repository)
        if not archs:
            self.logger.debug('{} has no relevant architectures'.format(project))
            return None

        for arch in archs:
            self.check_pra(project, repository, arch)

    def project_pseudometa_file_name(self, project, repository):
        filename = 'repo_checker'

        main_repo = Config.get(self.apiurl, project).get('main-repo')
        if not main_repo:
            filename += '.' + repository

        return filename

    def _split_and_filter(self, output):
        output = output.split("\n")
        for lnr, line in enumerate(output):
            if line.startswith('FOLLOWUP'):
                # there can be multiple lines with missing providers
                while lnr >= 0 and output[lnr - 1].endswith('none of the providers can be installed'):
                    # drop the provider lines preceding the FOLLOWUP marker
                    output.pop(lnr - 1)
                    lnr = lnr - 1
        for lnr in reversed(range(len(output))):
            # those lines are hardly interesting for us
            if output[lnr].find('(we have') >= 0:
                del output[lnr]
        return output
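
    # Sketch of the filtering above on hypothetical installcheck output:
    #   ['foo requires libbar, but none of the providers can be installed',
    #    'FOLLOWUP(bar)',
    #    'conflict with foo-devel (we have foo-1.0)']
    # reduces to ['FOLLOWUP(bar)']: provider lines preceding a FOLLOWUP
    # marker and '(we have' lines are dropped.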

    def project_repository(self, project):
        repository = Config.get(self.apiurl, project).get('main-repo')
        if not repository:
            self.logger.debug('no main-repo defined for {}'.format(project))

            search_project = 'openSUSE:Factory'
            for search_repository in ('snapshot', 'standard'):
                repository = repository_path_search(
                    self.apiurl, project, search_project, search_repository)

                if repository:
                    self.logger.debug('found chain to {}/{} via {}'.format(
                        search_project, search_repository, repository))
                    break

        return repository
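
    # main-repo comes from the project's OSRT:Config attribute, e.g. a line
    # `main-repo: standard` (illustrative); without it the repository is
    # located by searching the build path chain towards openSUSE:Factory.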

    def store_yaml(self, state, project, repository, arch):
        if not self.store_project or not self.store_package:
            return

        state_yaml = yaml.dump(state, default_flow_style=False)
        comment = 'Updated rebuild infos for {}/{}/{}'.format(project, repository, arch)
        source_file_ensure(self.apiurl, self.store_project, self.store_package,
                           self.store_filename, state_yaml, comment=comment)
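
    # The state file name is derived in check_pra below, e.g.
    # rebuildpacs.openSUSE:Factory-standard.yaml (illustrative), and is
    # committed to the --store project/package on every change.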

    def check_pra(self, project, repository, arch):
        config = Config.get(self.apiurl, project)

        oldstate = None
        self.store_filename = 'rebuildpacs.{}-{}.yaml'.format(project, repository)
        if self.store_project and self.store_package:
            state_yaml = source_file_load(self.apiurl, self.store_project, self.store_package,
                                          self.store_filename)
            if state_yaml:
                oldstate = yaml.safe_load(state_yaml)

        oldstate = oldstate or {}
        oldstate.setdefault('check', {})
        if not isinstance(oldstate['check'], dict):
            oldstate['check'] = {}
        oldstate.setdefault('leafs', {})
        if not isinstance(oldstate['leafs'], dict):
            oldstate['leafs'] = {}

        repository_pairs = repository_path_expand(self.apiurl, project, repository)
        directories = []
        primaryxmls = []
        for pair_project, pair_repository in repository_pairs:
            mirrored = mirror(self.apiurl, pair_project, pair_repository, arch)
            if os.path.isdir(mirrored):
                directories.append(mirrored)
            else:
                primaryxmls.append(mirrored)

        parsed = dict()
        with tempfile.TemporaryDirectory(prefix='repochecker') as tmpdir:
            pfile = os.path.join(tmpdir, 'packages')

            SCRIPT_PATH = os.path.dirname(os.path.realpath(__file__))
            script = os.path.join(SCRIPT_PATH, 'write_repo_susetags_file.pl')
            parts = ['perl', script, tmpdir] + directories

            p = subprocess.run(parts)
            if p.returncode:
                # technically only 126, but there is no other value atm -
                # so if some other perl error happens, we don't continue
                raise CorruptRepos

            target_packages = []
            with open(os.path.join(tmpdir, 'catalog.yml')) as file:
                catalog = yaml.safe_load(file)
                if catalog is not None:
                    target_packages = catalog.get(directories[0], [])

            parsed = parsed_installcheck([pfile] + primaryxmls, arch, target_packages, [])
            for package in parsed:
                parsed[package]['output'] = "\n".join(parsed[package]['output'])

        # let's risk an N*N algorithm in the hope that we have a limited N
        for package1 in parsed:
            output = parsed[package1]['output']
            for package2 in parsed:
                if package1 == package2:
                    continue
                output = output.replace(parsed[package2]['output'], 'FOLLOWUP(' + package2 + ')')
            parsed[package1]['output'] = output

        for package in parsed:
            parsed[package]['output'] = self._split_and_filter(parsed[package]['output'])

        url = makeurl(self.apiurl, ['build', project, '_result'], {
            'repository': repository, 'arch': arch, 'code': 'succeeded'})
        root = ET.parse(http_GET(url)).getroot()
        succeeding = list(map(lambda x: x.get('package'), root.findall('.//status')))

        per_source = dict()

        for package, entry in parsed.items():
            source = "{}/{}/{}/{}".format(project, repository, arch, entry['source'])
            per_source.setdefault(source, {'output': [], 'builds': entry['source'] in succeeding})
            per_source[source]['output'].extend(entry['output'])

        rebuilds = set()

        for source in sorted(per_source):
            if not per_source[source]['output']:
                continue
            self.logger.debug("{} builds: {}".format(source, per_source[source]['builds']))
            self.logger.debug(" " + "\n ".join(per_source[source]['output']))
            if not per_source[source]['builds']:  # nothing we can do
                continue
            old_output = oldstate['check'].get(source, {}).get('problem', [])
            if sorted(old_output) == sorted(per_source[source]['output']):
                self.logger.debug("unchanged problem")
                continue
            self.logger.info("rebuild %s", source)
            rebuilds.add(os.path.basename(source))
            for line in difflib.unified_diff(old_output, per_source[source]['output'], 'before', 'now'):
                self.logger.debug(line.strip())
            oldstate['check'][source] = {'problem': per_source[source]['output'],
                                         'rebuild': str(datetime.datetime.now())}

        for source in list(oldstate['check']):
            if not source.startswith('{}/{}/{}/'.format(project, repository, arch)):
                continue
            if os.path.basename(source) not in succeeding:
                continue
            if source not in per_source:
                self.logger.info("No known problem, erasing %s", source)
                del oldstate['check'][source]
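
        # oldstate now has the shape (illustrative):
        #   check:
        #     prj/repo/arch/srcpkg: {problem: [...], rebuild: '2019-06-06 ...'}
        #   leafs:
        #     prj/repo/arch/pkg: {buildinfo: <sha256 hex>, rebuild: '2019-06-06 ...'}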

        packages = config.get('rebuildpacs-leafs', '').split()
        if not self.rebuild:  # ignore in this case
            packages = []

        # first round: collect all infos from obs
        infos = dict()
        for package in packages:
            subpacks, build_deps = self.check_leaf_package(project, repository, arch, package)
            infos[package] = {'subpacks': subpacks, 'deps': build_deps}

        # calculate rebuild triggers
        rebuild_triggers = dict()
        for package1 in packages:
            for package2 in packages:
                if package1 == package2:
                    continue
                for subpack in infos[package1]['subpacks']:
                    if subpack in infos[package2]['deps']:
                        rebuild_triggers.setdefault(package1, set())
                        rebuild_triggers[package1].add(package2)
                        # ignore this dependency, we already trigger both of them
                        del infos[package2]['deps'][subpack]

        # calculate build info hashes
        for package in packages:
            if package not in succeeding:
                self.logger.debug("Ignore %s for the moment, not succeeding", package)
                continue
            m = hashlib.sha256()
            for bdep in sorted(infos[package]['deps']):
                m.update(bytes(bdep + '-' + infos[package]['deps'][bdep], 'utf-8'))
            state_key = '{}/{}/{}/{}'.format(project, repository, arch, package)
            olddigest = oldstate['leafs'].get(state_key, {}).get('buildinfo')
            if olddigest == m.hexdigest():
                continue
            self.logger.info("rebuild leaf package %s (%s vs %s)", package, olddigest, m.hexdigest())
            rebuilds.add(package)
            oldstate['leafs'][state_key] = {'buildinfo': m.hexdigest(),
                                            'rebuild': str(datetime.datetime.now())}

        if self.dryrun:
            if self.rebuild:
                self.logger.info("To rebuild: %s", ' '.join(rebuilds))
            return

        if not self.rebuild or not rebuilds:
            self.logger.debug("Nothing to rebuild")
            # in case we do rebuild, wait for it to succeed before saving
            self.store_yaml(oldstate, project, repository, arch)
            return

        query = {'cmd': 'rebuild', 'repository': repository, 'arch': arch, 'package': rebuilds}
        url = makeurl(self.apiurl, ['build', project])
        headers = {'Content-Type': 'application/x-www-form-urlencoded'}
        # POST the parameters as a form-encoded body rather than in the URL
        http_request('POST', url, headers, data=urlencode(query, doseq=True))

        self.store_yaml(oldstate, project, repository, arch)

    def check_leaf_package(self, project, repository, arch, package):
        url = makeurl(self.apiurl, ['build', project, repository, arch, package, '_buildinfo'])
        root = ET.parse(http_GET(url)).getroot()
        subpacks = set()
        for sp in root.findall('subpack'):
            subpacks.add(sp.text)
        build_deps = dict()
        for bd in root.findall('bdep'):
            if bd.get('notmeta') == '1':
                continue
            build_deps[bd.get('name')] = bd.get('version') + '-' + bd.get('release')
        return subpacks, build_deps
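
    # A _buildinfo document looks roughly like (abridged, illustrative):
    #   <buildinfo>
    #     <subpack>libfoo1</subpack>
    #     <bdep name="gcc" version="9.2.1" release="1.1" notmeta="1"/>
    #   </buildinfo>
    # Only bdeps without notmeta="1" contribute to the returned build_deps.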


class CommandLineInterface(ToolBase.CommandLineInterface):

    def __init__(self, *args, **kwargs):
        ToolBase.CommandLineInterface.__init__(self, *args, **kwargs)

    def setup_tool(self):
        return RepoChecker()

    @cmdln.option('--store', help='Project/Package to store the rebuild infos in')
    @cmdln.option('-r', '--repo', dest='repo', help='Repository to check')
    @cmdln.option('--no-rebuild', dest='norebuild', action='store_true', help='Only track issues, do not rebuild')
    def do_check(self, subcmd, opts, project):
        """${cmd_name}: Rebuild packages in rebuild=local projects

        ${cmd_usage}
        ${cmd_option_list}
        """
        self.tool.rebuild = not opts.norebuild
        self.tool.parse_store(opts.store)
        self.tool.apiurl = conf.config['apiurl']
        self.tool.check(project, opts.repo)


if __name__ == '__main__':
    app = CommandLineInterface()
    sys.exit(app.main())