# Copyright (C) 2014 SUSE Linux Products GmbH
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

# Copy this script to ~/.osc-plugins/ or /var/lib/osc-plugins .
# Then try to run 'osc check_repo --help' to see the usage.
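
# For example, assuming this file is saved as osc-check_repo.py:
#
#   mkdir -p ~/.osc-plugins
#   cp osc-check_repo.py ~/.osc-plugins/
#   osc check_repo --help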

from collections import defaultdict
from collections import namedtuple
import os
import shutil
import subprocess
import tempfile
import traceback
import sys

from osc import cmdln
from osc import conf
from osc import oscerr

PLUGINDIR = os.path.dirname(os.path.realpath(__file__.replace('.pyc', '.py')))

from osclib.conf import Config
from osclib.checkrepo import CheckRepo
from osclib.checkrepo import BINCACHE, DOWNLOADS
from osclib.cache import Cache
from osclib.cycle import CycleDetector
from osclib.memoize import CACHEDIR
from osclib.memoize import save_cache_path
from osclib.request_finder import RequestFinder
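
# The osclib Cache caches HTTP responses from the build service; keep
# them in a directory dedicated to this checker.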
Cache.CACHE_DIR = save_cache_path('opensuse-repo-checker-http')


def _check_repo_download(self, request):
    toignore = set()
    request.downloads = defaultdict(list)

    # Do not check the package lists if the repository was excluded.
    if request.build_excluded:
        return set()

    # XXX TODO - Rewrite the logic here; meanwhile the arch is set to x86_64.
    arch = 'x86_64'

    if request.src_package in request.i686_only:
        # Using the binaries imported from x86_64 to check the
        # requirements is fine, but we can not get rpmlint.log from the
        # x86_64 repo if it was excluded. The i586 repo must have built
        # successfully already, as we checked it in
        # repositories_to_check(). Therefore, we have to download the
        # binary files from the i586 repo.
        arch = 'i586'

    # if not request.build_excluded:
    #     arch = 'x86_64'
    # else:
    #     arch = 'i586'

    ToDownload = namedtuple('ToDownload', ('project', 'repo', 'arch', 'package', 'size'))
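
    # Judging by how the entries are used below, each tuple returned by
    # get_package_list_from_repository carries the file name in fn[0],
    # the binary package name in fn[1] and the size in fn[3].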
2014-07-01 19:55:17 +02:00
|
|
|
for i, goodrepo in enumerate(request.goodrepos):
|
|
|
|
repo = goodrepo[1]
|
|
|
|
|
2014-11-10 15:32:52 +01:00
|
|
|
# we assume x86_64 is there unless build is excluded
|
2014-11-25 14:19:08 +01:00
|
|
|
pkglist = self.checkrepo.get_package_list_from_repository(
|
|
|
|
request.shadow_src_project, repo, arch,
|
|
|
|
request.src_package)
|
2014-11-10 15:32:52 +01:00
|
|
|
todownload = [ToDownload(request.shadow_src_project, repo, arch,
|
2014-11-25 14:19:08 +01:00
|
|
|
fn[0], fn[3]) for fn in pkglist]
|
|
|
|
|
|
|
|
toignore.update(fn[1] for fn in pkglist)
|
2014-06-17 18:01:08 +02:00
|
|
|
|
2014-07-21 16:29:49 +02:00
|
|
|
self.checkrepo._download(request, todownload)
|
2014-06-17 18:01:08 +02:00
|
|
|
if request.error:
|
|
|
|
return set()
|
|
|
|
|
2015-02-19 10:57:55 +01:00
|
|
|
staging_prefix = '{}:'.format(self.checkrepo.staging.cstaging)
|
2014-08-13 09:46:37 +02:00
|
|
|
if staging_prefix in str(request.group):
|
2014-11-25 14:19:08 +01:00
|
|
|
pkglist = self.checkrepo.get_package_list_from_repository(
|
|
|
|
request.group, 'standard', arch,
|
|
|
|
request.src_package)
|
2014-11-10 15:32:52 +01:00
|
|
|
todownload = [ToDownload(request.group, 'standard', arch,
|
2014-11-25 14:19:08 +01:00
|
|
|
fn[0], fn[3]) for fn in pkglist]
|
2014-06-17 18:01:08 +02:00
|
|
|
|
2014-07-21 16:29:49 +02:00
|
|
|
self.checkrepo._download(request, todownload)
|
2014-06-17 18:01:08 +02:00
|
|
|
if request.error:
|
|
|
|
return set()
|
2014-01-01 15:53:52 +01:00
|
|
|
|
2014-11-25 14:19:08 +01:00
|
|
|
toignore.update(fn[1] for fn in pkglist)
|
|
|
|
|
|
|
|
pkglist = self.checkrepo.get_package_list_from_repository(
|
|
|
|
request.group + ':DVD', 'standard',
|
|
|
|
'x86_64', request.src_package)
|
2014-07-21 14:33:48 +02:00
|
|
|
todownload = [ToDownload(request.group + ':DVD', 'standard',
|
2014-11-25 14:19:08 +01:00
|
|
|
'x86_64', fn[0], fn[3]) for fn in pkglist]
|
|
|
|
|
|
|
|
toignore.update(fn[1] for fn in pkglist)
|
2014-06-23 11:03:47 +02:00
|
|
|
|
2014-07-21 16:29:49 +02:00
|
|
|
self.checkrepo._download(request, todownload)
|
2014-06-23 11:03:47 +02:00
|
|
|
if request.error:
|
|
|
|
return set()
|
2014-12-01 10:01:01 +01:00
|
|
|
|
|
|
|
# Update toignore with the names of the source project (here in
|
|
|
|
# this method) and with the names of the target project (_toignore
|
|
|
|
# method).
|
|
|
|
toignore.update(self.checkrepo._toignore(request))
|
2014-11-25 14:19:08 +01:00
|
|
|
return toignore
|
2013-06-20 17:30:54 +02:00
|
|
|
|
|
|
|
|
2014-06-23 13:45:48 +02:00
|
|
|


# Used in _check_repo_group only to cache error messages.
_errors_printed = set()


def _check_repo_group(self, id_, requests, skip_cycle=None, debug=False):
    if skip_cycle is None:
        skip_cycle = []

    print '> Check group [%s]' % ', '.join(r.str_compact() for r in requests)

    # XXX TODO - If the requests come from the command line, the group
    # is still not there.

    # Do not continue if any of the packages do not successfully build.
    if not all(self.checkrepo.is_buildsuccess(r) for r in requests if r.action_type != 'delete'):
        return

    toignore = set()
    destdir = os.path.join(BINCACHE, str(requests[0].group))
    fetched = {r: False for r in self.checkrepo.groups.get(id_, [])}
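    # fetched maps every request id of the group (not only the ones
    # passed as arguments) to whether its binaries were downloaded in
    # this run.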
    packs = []

    for request in requests:
        if request.action_type == 'delete':
            continue

        i = self._check_repo_download(request)
        if request.error and request.error not in _errors_printed:
            _errors_printed.add(request.error)
            if not request.updated:
                print ' - %s' % request.error
                self.checkrepo.change_review_state(request.request_id, 'new', message=request.error)
                request.updated = True
            else:
                print ' - %s' % request.error
            return
        toignore.update(i)
        fetched[request.request_id] = True

    packs.extend(requests)

    # Extend the packs list with the packages and .spec files of the
    # not-fetched requests. The not-fetched ones are the requests of
    # the same group that are not listed as a parameter.
    for request_id, is_fetched in fetched.items():
        if not is_fetched:
            packs.extend(self.checkrepo.check_specs(request_id=request_id))

    # Download the repos from the requests of the same group that were
    # not explicitly given on the command line.
    for rq in packs[:]:
        if rq.request_id in fetched and fetched[rq.request_id]:
            continue
        i = set()
        if rq.action_type == 'delete':
            # For delete requests we only care about toignore.
            i = self.checkrepo._toignore(rq)
            # We also check that nothing depends on the package and
            # that the request originates from the package maintainer.
            error_delete = self.checkrepo.is_safe_to_delete(rq)
            if error_delete:
                rq.error = 'This delete request is not safe to accept yet, ' \
                           'please wait until the reasons disappear in the ' \
                           'target project. %s' % error_delete
                print ' - %s' % rq.error
                self.checkrepo.change_review_state(rq.request_id, 'new', message=rq.error)
                rq.updated = True
            else:
                msg = 'Request is safe %s' % rq
                print 'ACCEPTED', msg
                self.checkrepo.change_review_state(rq.request_id, 'accepted', message=msg)

            # Remove it from packs, so it does not interfere with the
            # rest of the check.
            packs.remove(rq)
        else:
            # We need to call this to fetch the good repos to download,
            # but the return value is of no interest right now.
            self.checkrepo.is_buildsuccess(rq)
            i = self._check_repo_download(rq)
            if rq.error:
                print 'ERROR (ALREADY ACCEPTED?):', rq.error
                rq.updated = True

        toignore.update(i)

    # Detect cycles in the current Factory / openSUSE graph after we
    # update the links with the current list of requests.
    cycle_detector = CycleDetector(self.checkrepo.staging)
    for (cycle, new_edges, new_pkgs) in cycle_detector.cycles(requests=packs):
        # If there are new cycles that also contain new packages, mark
        # all requests as updated, to avoid accepting them.
        if new_pkgs:
            print
            print ' - New cycle detected:', sorted(cycle)
            print ' - New edges:', new_edges

            if skip_cycle:
                print ' - Skipping this cycle and moving to the next check.'
                continue
            else:
                print ' - If you want to skip this cycle, run check repo ' \
                      'manually with the -c / --skipcycle option.'

            for request in requests:
                request.updated = True
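
    # Hold a request while every one of its good repos still has
    # missing dependencies; the review message lists the missing
    # packages.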
    for rq in requests:
        # Check whether there is any goodrepo without missing packages.
        missing_repos = set(rq.missings.keys())
        if any(r not in missing_repos for r in rq.goodrepos):
            continue

        smissing = []
        all_missing_packages = {item for sublist in rq.missings.values() for item in sublist}
        for package in all_missing_packages:
            alreadyin = False
            for t in packs:
                if package == t.tgt_package:
                    alreadyin = True
            if alreadyin:
                continue
            request = self.checkrepo.find_request_id(rq.tgt_project, package)
            if request:
                greqs = self.checkrepo.groups.get(rq.group, [])
                if request in greqs:
                    continue
                package = '#[%s](%s)' % (request, package)
            smissing.append(package)
        if len(smissing):
            msg = 'Please wait until these dependencies are in %s: %s [%s]' % (
                rq.tgt_project, ', '.join(smissing), rq.tgt_package)
            if not rq.updated:
                self.checkrepo.change_review_state(rq.request_id, 'new', message=msg)
                print ' - %s' % msg
                rq.updated = True
            else:
                print ' - %s' % msg
            return

    # Create a temporary file for the parameters.
    params_file = tempfile.NamedTemporaryFile(delete=False)
    params_file.write('\n'.join(f for f in toignore if f.strip()))
    params_file.close()
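    # The file now lists one binary name per line; it is handed to
    # repo-checker.pl below (the -f option) as the set of binaries to
    # ignore.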

    # We want to test the binaries of one of the repos with the Perl
    # script, and if it fails, test the other repo. The order of
    # testing will be stored in the execution_plan.

    execution_plan = defaultdict(list)

    DEBUG_PLAN = debug

    # Get all the (project, repo, disturl) triplets where the disturl
    # is compatible with the request. For the same package we can have
    # more than one good triplet, even with different MD5 DISTURLs.
    # The general strategy is to collect the different triplets and
    # provide execution plans that use the same (project, repo) for
    # every package, with a fallback to a different (project, repo) in
    # case the original is not found.
    all_good_downloads = defaultdict(set)
    for rq in packs:
        for (prj, repo, disturl) in rq.downloads:
            if self.checkrepo.check_disturl(rq, md5_disturl=disturl):
                all_good_downloads[(prj, repo)].add(disturl)
                if DEBUG_PLAN:
                    print 'DEBUG Good DISTURL -', rq.str_compact(), (prj, repo), disturl
            elif DEBUG_PLAN:
                print 'DEBUG Bad DISTURL -', rq.str_compact(), (prj, repo), disturl
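
    # For illustration (hypothetical values), all_good_downloads may
    # now look like:
    #
    #   {('openSUSE:Factory:Staging:J', 'standard'): set(['<md5 disturl>'])}
    #
    # Each execution_plan[(project, repo)] entry built below is a list
    # of (request, plan, downloads) tuples, one per request in packs.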

    if not all_good_downloads:
        print ' - No matching downloads for disturl found.'
        if len(packs) == 1 and packs[0].tgt_package in ('rpmlint-tests',):
            print ' - %s known to have no installable rpms, skipped' % packs[0].tgt_package
        return

    for project, repo in all_good_downloads:
        plan = (project, repo)
        valid_disturl = all_good_downloads[plan]
        if DEBUG_PLAN:
            print 'DEBUG Designing plan', plan, valid_disturl
        for rq in packs:
            if DEBUG_PLAN:
                print 'DEBUG In', rq
            # Find (project, repo) in rq.downloads.
            keys = [key for key in rq.downloads if key[0] == project and key[1] == repo and key[2] in valid_disturl]
            if DEBUG_PLAN:
                print 'DEBUG Keys', keys

            if keys:
                assert len(keys) == 1, 'Found more than one candidate for the plan'

                execution_plan[plan].append((rq, plan, rq.downloads[keys[0]]))
                if DEBUG_PLAN:
                    print 'DEBUG Downloads', rq.downloads[keys[0]]
            else:
                if DEBUG_PLAN:
                    print 'DEBUG Searching for a fallback!'
                fallbacks = [key for key in rq.downloads if (key[0], key[1]) in all_good_downloads and key[2] in all_good_downloads[(key[0], key[1])]]
                if fallbacks:
                    # XXX TODO - Recurse here to create combinations.
                    # Meanwhile, prioritize the one fallback that is in
                    # a staging project.
                    fallbacks_from_staging = [fb for fb in fallbacks if 'Staging' in fb[0]]
                    fallbacks = fallbacks_from_staging if fallbacks_from_staging else fallbacks
                    fallback = fallbacks.pop()
                    if DEBUG_PLAN:
                        print 'DEBUG Fallback found', fallback
                        print 'DEBUG Fallback downloads', rq.downloads[fallback]

                    alternative_plan = fallback[:2]
                    execution_plan[plan].append((rq, alternative_plan, rq.downloads[fallback]))
                # elif rq.status == 'succeeded':
                else:
                    print 'no fallback for', rq

    repo_checker_error = ''
    for project_repo in execution_plan:
        dirstolink = execution_plan[project_repo]

        if DEBUG_PLAN:
            print 'DEBUG Running plan', project_repo
            for rq, repo, downloads in dirstolink:
                print ' ', rq
                print ' ', repo
                for f in downloads:
                    print '  -', f

        # Make sure to remove the directory in case of an early exit.
        if os.path.exists(destdir):
            shutil.rmtree(destdir)

        os.makedirs(destdir)
        for rq, _, downloads in dirstolink:
            dir_ = destdir + '/%s' % rq.tgt_package
            for d in downloads:
                if not os.path.exists(dir_):
                    os.mkdir(dir_)
                target = os.path.join(dir_, os.path.basename(d))
                if os.path.exists(target):
                    print 'Warning, symlink already exists', d, target
                    os.unlink(target)
                os.symlink(d, target)
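
        # Run the external repo-checker.pl script over the symlinked
        # binaries: the positional argument is the directory to check,
        # -r points at the local mirror of the standard repo and -f at
        # the file with the binaries to ignore.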
        repochecker = os.path.join(PLUGINDIR, 'repo-checker.pl')
        civs = "LC_ALL=C perl %s '%s' -r %s -f %s" % (repochecker, destdir, self.repo_dir, params_file.name)
        p = subprocess.Popen(civs, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True)
        stdoutdata, stderrdata = p.communicate()
        stdoutdata = stdoutdata.strip()
        ret = p.returncode

        # Clean the directory that contains all the symlinks.
        shutil.rmtree(destdir)

        # There are several execution plans; each one can have its own
        # error message.
        if ret:
            print ' - Execution plan for %s failed' % str(project_repo)
        else:
            print ' - Successful plan', project_repo

        if stdoutdata:
            print '-' * 40
            print stdoutdata
            print '-' * 40
        if stderrdata:
            print '-' * 40
            print stderrdata
            print '-' * 40

        # Detect if this error message comes from a staging project.
        # Store it in repo_checker_error, which is the text that will
        # be published in the error message.
        staging_prefix = '{}:'.format(self.checkrepo.staging.cstaging)
        if staging_prefix in project_repo[0]:
            repo_checker_error = stdoutdata
        if not any(staging_prefix in p_r[0] for p_r in execution_plan):
            repo_checker_error += '\nExecution plan: %s\n%s' % ('/'.join(project_repo), stdoutdata)

        # print ret, stdoutdata, stderrdata
        # raise Exception()

        if not ret:  # skip the others
            for p, gr, downloads in dirstolink:
                p.goodrepo = '%s/%s' % gr
            break

    os.unlink(params_file.name)
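
    # updated tracks the request ids whose review state was already
    # changed in this pass, so they are not touched twice.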
    updated = {}

    if ret:
        for rq in requests:
            if updated.get(rq.request_id, False) or rq.updated:
                continue
            if repo_checker_error not in _errors_printed:
                _errors_printed.add(repo_checker_error)
                print repo_checker_error
            self.checkrepo.change_review_state(rq.request_id, 'new', message=repo_checker_error)
            rq.updated = True
            updated[rq.request_id] = 1
        return

    for rq in requests:
        if updated.get(rq.request_id, False) or rq.updated:
            continue
        if not hasattr(rq, 'goodrepo'):
            msg = 'Can not find a good repo for %s' % rq.str_compact()
            print 'NOT ACCEPTED - ', msg
            print 'Perhaps this request is not built for i586/x86_64, or is built for i586 only. A human needs to check it!'
            continue
        msg = 'Builds for repo %s' % rq.goodrepo
        print 'ACCEPTED', msg
        self.checkrepo.change_review_state(rq.request_id, 'accepted', message=msg)
        self.checkrepo.remove_link_if_shadow_devel(rq)
        rq.updated = True
        updated[rq.request_id] = 1


def _mirror_full(self, plugin_dir, repo_dir):
    """Call the bs_mirrorfull script to mirror packages."""
    url = 'https://api.opensuse.org/public/build/%s/%s/x86_64' % (self.checkrepo.project, 'standard')

    if not os.path.exists(repo_dir):
        os.mkdir(repo_dir)

    script = 'LC_ALL=C perl %s/bs_mirrorfull --nodebug %s %s' % (plugin_dir, url, repo_dir)
    os.system(script)


def _print_request_and_specs(self, request_and_specs):
    if not request_and_specs:
        return
    print request_and_specs[0]
    for spec in request_and_specs[1:]:
        print ' *', spec


@cmdln.alias('check', 'cr')
@cmdln.option('-p', '--project', dest='project', metavar='PROJECT', default='Factory',
              help='select a different project instead of openSUSE:Factory')
@cmdln.option('-s', '--skip', action='store_true', help='skip review')
@cmdln.option('-c', '--skipcycle', action='store_true', help='skip cycle check')
@cmdln.option('-n', '--dry', action='store_true', help='dry run, don\'t change review state')
@cmdln.option('-v', '--verbose', action='store_true', help='verbose output')
def do_check_repo(self, subcmd, opts, *args):
    """${cmd_name}: Checker review of submit requests.

    Usage:
       ${cmd_name} [SRID]...
           Shows pending review requests and their current state.
       ${cmd_name} PRJ
           Shows pending review requests in a specific project.
    ${cmd_option_list}
    """

    # Make sure to remove the directory where the files were downloaded
    # (it is not the cache).
    try:
        shutil.rmtree(DOWNLOADS)
    except OSError:
        pass
    os.makedirs(DOWNLOADS)

    Config('openSUSE:%s' % opts.project)
    self.checkrepo = CheckRepo(self.get_api_url(),
                               'openSUSE:%s' % opts.project,
                               readonly=opts.dry,
                               debug=opts.verbose)

    prjs_or_pkg = [arg for arg in args if not arg.isdigit()]
    ids = [arg for arg in args if arg.isdigit()]

    # Recover the requests that are for this project or package and
    # expand the ids.
    for pop in prjs_or_pkg:
        # We try it as a project first.
        as_prj = pop
        if ':' not in as_prj:
            as_prj = self.checkrepo.staging.prj_from_letter(as_prj)
        try:
            meta = self.checkrepo.staging.get_prj_pseudometa(as_prj)
            ids.extend(rq['id'] for rq in meta['requests'])
        except:
            # Now try it as a package.
            as_pkg = pop
            srs = RequestFinder.find_sr([as_pkg], self.checkrepo.staging)
            ids.extend(srs.keys())

    if opts.skip:
        if not len(ids):
            raise oscerr.WrongArgs('Provide #IDs or package names to skip.')

        for request_id in ids:
            msg = 'skip review'
            print 'ACCEPTED', msg
            self.checkrepo.change_review_state(request_id, 'accepted', message=msg)
            _request = self.checkrepo.get_request(request_id, internal=True)
            self.checkrepo.remove_link_if_shadow_devel(_request)
        return

    # Store the requests' package information and .spec files: store
    # all source containers involved.
    requests = []
    print 'Pending requests list:'
    print '----------------------'
    if not ids:
        # check_specs() returns a list; we flatten it here with .extend().
        for request in self.checkrepo.pending_requests():
            request_and_specs = self.checkrepo.check_specs(request=request)
            self._print_request_and_specs(request_and_specs)
            requests.extend(request_and_specs)
    else:
        # We already have a list of ids; use them.
        for request_id in ids:
            request_and_specs = self.checkrepo.check_specs(request_id=request_id)
            self._print_request_and_specs(request_and_specs)
            requests.extend(request_and_specs)

    # Order the requests before grouping.
    requests = sorted(requests, key=lambda p: p.request_id, reverse=True)

    # Group the requests into staging projects (or alone if it is an
    # isolated request).
    #
    # For example:
    # {
    #     'openSUSE:Factory:Staging:J': [235851, 235753],
    #     235856: [235856],
    # }
    #
    # * The list of requests is not the full list of requests in this
    #   group / staging project, but only the ones given as a
    #   parameter.
    #
    # * The full list of requests can be found in
    #   self.checkrepo.groups['openSUSE:Factory:Staging:J']
    #
    groups = {}
    for request in requests:
        rqs = groups.get(request.group, [])
        rqs.append(request)
        groups[request.group] = rqs

    # Mirror the packages locally in the CACHEDIR.
    self.repo_dir = '%s/repo-%s-%s-x86_64' % (CACHEDIR, 'openSUSE:{}'.format(opts.project), 'standard')
    self._mirror_full(PLUGINDIR, self.repo_dir)

    print
    print 'Analysis results'
    print '----------------'
    print

    # Sort the groups from high to low. This also puts the staging
    # projects first.
    for id_, reqs in sorted(groups.items(), reverse=True):
        try:
            self._check_repo_group(id_, reqs,
                                   skip_cycle=opts.skipcycle,
                                   debug=opts.verbose)
        except Exception as e:
            print 'ERROR -- An exception happened while checking a group [%s]' % e
            if conf.config['debug']:
                print traceback.format_exc()
        print
        print