Merge pull request #253 from aplanas/master

Do not download requests without RPM.
Stephan Kulow · 2014-09-29 21:58:44 +02:00 · commit e236f2217a
2 changed files with 11 additions and 127 deletions

@@ -20,7 +20,6 @@
from collections import defaultdict
from collections import namedtuple
import os
import re
import shutil
import subprocess
import tempfile
@@ -39,50 +38,9 @@ from osclib.cycle import CycleDetector
from osclib.memoize import CACHEDIR


def _fix_local_cache(self, request, keys):
    """Remove old package versions from the cache."""
    def _cmp(x, y):
        x = [int(i) for i in x.split('.')]
        y = [int(i) for i in y.split('.')]
        return cmp(x, y)

    # Detect the most up-to-date version
    dirnames = set()
    newest = (None, None, None, None)
    for key in keys:
        for full_filename in request.downloads[key]:
            dirname = os.path.dirname(full_filename)
            dirnames.add(dirname)
            filename = os.path.basename(full_filename)
            result = re.match(r'.*-([^-]*)-([^-]*)\.[^-\.]+\.rpm', filename)
            if result:
                version, revision = result.groups()
                _, cversion, crevision, _ = newest
                if (not cversion
                        or _cmp(version, cversion) > 0
                        or (not _cmp(version, cversion) and _cmp(revision, crevision) > 0)):
                    newest = (key, version, revision, dirname)

    # Remove the rest
    most_updated_key, _, _, correct_dirname = newest
    dirnames.remove(correct_dirname)
    for dirname in dirnames:
        shutil.rmtree(dirname)

    return [most_updated_key]
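
For reference, a minimal standalone sketch of the newest-version selection that `_fix_local_cache` performs, swapping the naive dotted-integer `_cmp` (which would break on version segments such as '2a') for `rpm.labelCompare` from the rpm Python bindings. The bindings and the filenames are assumptions for illustration, not part of this commit:

import re
import rpm

# Hypothetical cached filenames for one package.
FILENAMES = ['flac-1.3.0-2.1.x86_64.rpm', 'flac-1.3.1-1.2.x86_64.rpm']

newest, newest_vr = None, None
for filename in FILENAMES:
    # Same name-version-release.arch.rpm pattern used above.
    match = re.match(r'.*-([^-]*)-([^-]*)\.[^-\.]+\.rpm', filename)
    if not match:
        continue
    version, release = match.groups()
    # labelCompare takes (epoch, version, release) triples and returns
    # -1, 0 or 1, like the old cmp() builtin.
    if newest_vr is None or rpm.labelCompare(('0', version, release),
                                             ('0',) + newest_vr) > 0:
        newest, newest_vr = filename, (version, release)

print(newest)  # flac-1.3.1-1.2.x86_64.rpm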
def _check_repo_download(self, request):
    request.downloads = defaultdict(list)

    # We found a cached version of the request, but the cache can be
    # partial. For example, after a rebuild we can still have a
    # working package locally, so the download list needs to be
    # updated with the local copies.
    if request.is_partially_cached:
        request.downloads = self.checkrepo._get_downloads_from_local(request)

    if request.build_excluded:
        return set()
@@ -284,16 +242,7 @@ def _check_repo_group(self, id_, requests, debug=False):
        if DEBUG_PLAN:
            print 'DEBUG Keys', keys

        if keys:
            # From time to time, a rebuild of a package can create
            # another revision. When this happens, an old revision of
            # the package with a still-valid DISTURL can also be in
            # the cache, so `len(keys) > 1`; one workaround is to
            # keep the newest package and remove the old ones.
            if len(keys) > 1:
                keys = self._fix_local_cache(rq, keys)
                if DEBUG_PLAN:
                    print 'DEBUG Cleaning the cache. New keys', keys
            assert len(keys) == 1, 'Found more than one candidate for the plan'
            execution_plan[plan].append((rq, plan, rq.downloads[keys[0]]))
            if DEBUG_PLAN:
@@ -531,3 +480,6 @@ def do_check_repo(self, subcmd, opts, *args):
        self._check_repo_group(id_, reqs, debug=opts.verbose)
        print
        print

    # Make sure to remove the cache for now
    shutil.rmtree(CACHEDIR)

@@ -14,7 +14,6 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
from collections import defaultdict
import os
import re
import subprocess
@@ -64,7 +63,6 @@ class Request(object):
        self.updated = False
        self.error = None
        self.build_excluded = False
        self.is_partially_cached = False
        self.action_type = 'submit'  # assume default
        self.downloads = []
        self.is_shadow_devel = False
@@ -574,11 +572,7 @@ class CheckRepo(object):
            if not os.path.exists(repodir):
                os.makedirs(repodir)
            t = os.path.join(repodir, fn)
            was_cached = self._get_binary_file(_project, _repo, arch,
                                               request.src_package,
                                               fn, t, mt)
            if was_cached:
                continue
            self._get_binary_file(_project, _repo, arch, request.src_package, fn, t, mt)

            # Organize the files into DISTURL directories.
            disturl = self._md5_disturl(self._disturl(t))
@@ -595,16 +589,16 @@ class CheckRepo(object):
            request.downloads[(_project, _repo, disturl)].append(file_in_disturl)

        # Some subpackages do not have any rpm (e.g. rpmlint)
        if not last_disturldir:
            return

        for _project, _repo, arch, fn, mt in todownload_rest:
            repodir = os.path.join(DOWNLOADS, request.src_package, _project, _repo)
            if not os.path.exists(repodir):
                os.makedirs(repodir)
            t = os.path.join(repodir, fn)
            was_cached = self._get_binary_file(_project, _repo, arch,
                                               request.src_package,
                                               fn, t, mt)
            if was_cached:
                continue
            self._get_binary_file(_project, _repo, arch, request.src_package, fn, t, mt)

            file_in_disturl = os.path.join(last_disturldir, fn)
            if last_disturldir:
@@ -686,58 +680,6 @@ class CheckRepo(object):
        return False

    def is_request_cached(self, request):
        """Search for the request in the local cache."""
        result = False

        package_dir = os.path.join(DOWNLOADS, request.src_package)
        rpm_packages = []
        for dirpath, dirnames, filenames in os.walk(package_dir):
            rpm_packages.extend(os.path.join(dirpath, f) for f in filenames if f.endswith('.rpm'))

        result = any(self.check_disturl(request, filename=rpm) for rpm in rpm_packages)

        return result
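
`check_disturl` itself is not part of this diff; for context, a hedged sketch of how the DISTURL tag can be read from a cached rpm with the rpm command line (the actual helper in osclib/checkrepo.py may differ):

import subprocess

def read_disturl(filename):
    # 'rpm -qp --queryformat %{DISTURL}' prints the DISTURL a binary
    # package was built from; check_disturl can then match it against
    # the disturl expected for the request.
    out = subprocess.check_output(('rpm', '--nosignature',
                                   '--queryformat', '%{DISTURL}',
                                   '-qp', filename))
    return out.strip()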
    def _get_goodrepos_from_local(self, request):
        """Calculate 'goodrepos' from the local cache."""
        # 'goodrepos' stores (project, repo) tuples
        goodrepos = []

        package_dir = os.path.join(DOWNLOADS, request.src_package)
        projects = os.walk(package_dir).next()[1]

        # XXX TODO - The generated list can be wrong; we need to
        # check whether each entry is still a goodrepo. To do this we
        # need to check verifymd5 or something similar, because the
        # build status can have a value other than 'success'.
        for project in projects:
            project_dir = os.path.join(package_dir, project)
            repos = os.walk(project_dir).next()[1]
            for repo in repos:
                goodrepos.append((project, repo))

        return goodrepos
    def _get_downloads_from_local(self, request):
        """Calculate 'downloads' from the local cache."""
        downloads = defaultdict(list)

        package_dir = os.path.join(DOWNLOADS, request.src_package)
        for project, repo in self._get_goodrepos_from_local(request):
            repo_dir = os.path.join(package_dir, project, repo)
            disturls = os.walk(repo_dir).next()[1]
            for disturl in disturls:
                disturl_dir = os.path.join(DOWNLOADS, request.src_package, project, repo, disturl)
                filenames = os.walk(disturl_dir).next()[2]
                downloads[(project, repo, disturl)] = [os.path.join(disturl_dir, f) for f in filenames]

        return downloads
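
The two helpers above encode the on-disk cache layout DOWNLOADS/<src_package>/<project>/<repo>/<disturl>/<rpm>. A sketch of the same traversal written with next(os.walk(...)), which works on both Python 2 and 3 (the os.walk(...).next() idiom above is Python 2 only); the cache root here is an illustrative assumption:

import os
from collections import defaultdict

DOWNLOADS = 'cache/downloads'  # hypothetical cache root

def downloads_from_cache(src_package):
    # Rebuild the {(project, repo, disturl): [files]} mapping from the
    # directory layout DOWNLOADS/<package>/<project>/<repo>/<disturl>/.
    downloads = defaultdict(list)
    package_dir = os.path.join(DOWNLOADS, src_package)
    for project in next(os.walk(package_dir))[1]:
        project_dir = os.path.join(package_dir, project)
        for repo in next(os.walk(project_dir))[1]:
            repo_dir = os.path.join(project_dir, repo)
            for disturl in next(os.walk(repo_dir))[1]:
                disturl_dir = os.path.join(repo_dir, disturl)
                for f in next(os.walk(disturl_dir))[2]:
                    downloads[(project, repo, disturl)].append(
                        os.path.join(disturl_dir, f))
    return downloads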
    def get_missings(self, request):
        """Get the list of packages that are in missing status."""
@@ -771,14 +713,6 @@ class CheckRepo(object):
        """

        # Check if we have a local version of the package before
        # checking it remotely. If so, partially preload the
        # 'goodrepos' and 'missings' fields.
        if self.is_request_cached(request):
            request.is_partially_cached = True
            request.goodrepos = self._get_goodrepos_from_local(request)
            request.missings = self.get_missings(request)

        # If the request does not build properly on both Intel
        # platforms, return False.
        repos_to_check = self.repositories_to_check(request)
@@ -847,9 +781,7 @@ class CheckRepo(object):
            if r_foundfailed:
                foundfailed = r_foundfailed

        # If the request is partially cached, there may already be
        # some content in request.missings.
        request.missings = sorted(set(request.missings) | missings)
        request.missings = sorted(missings)

        if result:
            return True