#!/usr/bin/python
# Copyright (c) 2015 SUSE Linux GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

from optparse import OptionParser
from pprint import pprint, pformat
from stat import S_ISREG, S_ISLNK
from tempfile import NamedTemporaryFile
import cmdln
import logging
import os
import re
import shutil
import subprocess
import sys
import time
import abichecker_dbmodel as DB

try:
    from xml.etree import cElementTree as ET
except ImportError:
    import cElementTree as ET

import osc.conf
import osc.core
from osc.util.cpio import CpioRead

import urllib2
import rpm
from collections import namedtuple
from osclib.pkgcache import PkgCache

from xdg.BaseDirectory import save_cache_path

import ReviewBot

# Directory where binary packages are downloaded to.
BINCACHE = os.path.expanduser('~/co')
DOWNLOADS = os.path.join(BINCACHE, 'downloads')

# Where the cache files are stored
CACHEDIR = save_cache_path('opensuse-abi-checker')
UNPACKDIR = os.path.join(CACHEDIR, 'unpacked')

so_re = re.compile(r'^(?:/usr)/lib(?:64)?/lib([^/]+)\.so(?:\.[^/]+)?')
debugpkg_re = re.compile(r'-debug(?:source|info)(?:-32bit)?$')
disturl_re = re.compile(r'^obs://[^/]+/(?P<prj>[^/]+)/(?P<repo>[^/]+)/(?P<md5>[0-9a-f]{32})-(?P<pkg>.*)$')
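# Illustrative examples of what these patterns match (library and package
# names are made up):
#   so_re:       '/usr/lib64/libfoo.so.1'          -> captures 'foo'
#   debugpkg_re: 'libfoo1-debuginfo', 'foo-debugsource' style package names
#   disturl_re:  'obs://build.opensuse.org/openSUSE:Factory/standard/<md5>-libfoo'
#                -> prj/repo/md5/pkg groups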

# report for a source submission; holds one LibResult per library compared
Report = namedtuple('Report', ('src_project', 'src_package', 'src_rev', 'dst_project', 'dst_package', 'reports', 'result'))
# report for a single library
LibResult = namedtuple('LibResult', ('src_repo', 'src_lib', 'dst_repo', 'dst_lib', 'arch', 'htmlreport', 'result'))
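# Result semantics, as used below: a LibResult's 'result' is True when
# abi-compliance-checker judged the pair compatible; a Report's 'result'
# starts as the first library's outcome and drops to False as soon as one
# library is incompatible.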

class DistUrlMismatch(Exception):
    def __init__(self, disturl, prj, pkg, repo, vmd5):
        Exception.__init__(self)
        self.msg = 'disturl mismatch %s vs ...%s/%s/%s-%s'%(disturl, prj, repo, vmd5, pkg)

    def __str__(self):
        return self.msg

class ABIChecker(ReviewBot.ReviewBot):
    """ check ABI of library packages
    """

    def __init__(self, *args, **kwargs):
        ReviewBot.ReviewBot.__init__(self, *args, **kwargs)

        self.ts = rpm.TransactionSet()
        self.ts.setVSFlags(rpm._RPMVSF_NOSIGNATURES)

        self.pkgcache = PkgCache(BINCACHE)

        # reports of source submissions
        self.reports = []

        self.session = DB.db_session()

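    # The overall flow: resolve the verifymd5 of source and target package,
    # compute the repo/arch combinations common to both projects, extract
    # the libraries of both sides, pair them up (directly or via symlink
    # aliases to catch soname changes), then abi-dump and compare each pair.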
    def check_source_submission(self, src_project, src_package, src_rev, dst_project, dst_package):
        ReviewBot.ReviewBot.check_source_submission(self, src_project, src_package, src_rev, dst_project, dst_package)

        dst_vmd5 = self._get_verifymd5(dst_project, dst_package)
        if dst_vmd5 is None:
            self.logger.info("%s/%s does not exist, skip"%(dst_project, dst_package))
            return None
        src_vmd5 = self._get_verifymd5(src_project, src_package)
        if src_vmd5 is None:
            self.logger.info("%s/%s does not exist, skip"%(src_project, src_package))
            return None

        if os.path.exists(UNPACKDIR):
            shutil.rmtree(UNPACKDIR)

        # compute list of common repos to find out what to compare
        myrepos = self.findrepos(src_project, dst_project)

        # TODO: check build completed

        self.logger.debug(pformat(myrepos))

        notes = []
        libresults = []

        overall = None

        for mr in myrepos:
            dst_libs = self.extract(dst_project, dst_package, dst_vmd5, mr.dstrepo, mr.arch)
            src_libs = self.extract(src_project, src_package, src_vmd5, mr.srcrepo, mr.arch)

            if dst_libs is None or src_libs is None:
                # nothing to fetch, so no libs
                continue

            # create reverse index for aliases in the source project
            src_aliases = dict()
            for lib in src_libs.keys():
                for a in src_libs[lib]:
                    src_aliases.setdefault(a, set()).add(lib)

            # for each library in the destination project check if the same lib
            # exists in the source project. If not check the aliases (symlinks)
            # to catch soname changes. Generate pairs of matching libraries.
            pairs = set()
            for lib in dst_libs.keys():
                if lib in src_libs:
                    pairs.add((lib, lib))
                else:
                    self.logger.debug("%s not found in submission, checking aliases", lib)
                    for a in dst_libs[lib]:
                        if a in src_aliases:
                            for l in src_aliases[a]:
                                pairs.add((lib, l))

            self.logger.debug("to diff: %s", pformat(pairs))

            # for each pair dump and compare the abi
            for old, new in pairs:
                # abi dump of old lib (the version in the destination project)
                old_base = os.path.join(UNPACKDIR, dst_project, dst_package, mr.dstrepo, mr.arch)
                old_dump = os.path.join(CACHEDIR, 'old.dump')
                # abi dump of new lib (the submitted version)
                new_base = os.path.join(UNPACKDIR, src_project, src_package, mr.srcrepo, mr.arch)
                new_dump = os.path.join(CACHEDIR, 'new.dump')

                def cleanup():
                    if os.path.exists(old_dump):
                        os.unlink(old_dump)
                    if os.path.exists(new_dump):
                        os.unlink(new_dump)

                cleanup()

                # we only need the match to pass a library name to the abi checker
                m = so_re.match(old)
                htmlreport = 'report-%s-%s-%s-%s-%s-%08x.html'%(mr.srcrepo, os.path.basename(old), mr.dstrepo, os.path.basename(new), mr.arch, int(time.time()))

                # run abichecker
                if m \
                    and self.run_abi_dumper(old_dump, old_base, old) \
                    and self.run_abi_dumper(new_dump, new_base, new):
                    reportfn = os.path.join(CACHEDIR, htmlreport)
                    r = self.run_abi_checker(m.group(1), old_dump, new_dump, reportfn)
                    if r is not None:
                        self.logger.info('report saved to %s, compatible: %d', reportfn, r)
                        libresults.append(LibResult(mr.srcrepo, os.path.basename(old), mr.dstrepo, os.path.basename(new), mr.arch, htmlreport, r))
                        if overall is None:
                            overall = r
                        elif overall and not r:
                            overall = r
                else:
                    self.logger.error('failed to compare %s <> %s'%(old, new))
                    # XXX: report

                cleanup()

        if libresults and overall is not None:
            self.reports.append(Report(src_project, src_package, src_rev, dst_project, dst_package, libresults, overall))

        # upload reports

        if os.path.exists(UNPACKDIR):
            shutil.rmtree(UNPACKDIR)

        # we always accept the review, just leave a note if there were problems
        return True

    def check_one_request(self, req):

        self.reports = []
        ret = ReviewBot.ReviewBot.check_one_request(self, req)

        self.save_reports_to_db(req)

        return ret

    def save_reports_to_db(self, req):
        for r in self.reports:
            abicheck = DB.ABICheck(
                request_id = req.reqid,
                src_project = r.src_project,
                src_package = r.src_package,
                src_rev = r.src_rev,
                dst_project = r.dst_project,
                dst_package = r.dst_package,
                result = r.result,
                )
            self.session.add(abicheck)
            self.session.commit()
            self.logger.info("id %d"%abicheck.id)
            for lr in r.reports:
                libreport = DB.LibReport(
                    abicheck = abicheck,
                    src_repo = lr.src_repo,
                    src_lib = lr.src_lib,
                    dst_repo = lr.dst_repo,
                    dst_lib = lr.dst_lib,
                    arch = lr.arch,
                    htmlreport = lr.htmlreport,
                    result = lr.result,
                    )
                self.session.add(libreport)
            self.session.commit()

        self.reports = []

    def run_abi_checker(self, libname, old, new, output):
        cmd = ['abi-compliance-checker',
            '-lib', libname,
            '-old', old,
            '-new', new,
            '-report-path', output
            ]
        self.logger.debug(cmd)
        r = subprocess.call(cmd, close_fds=True)
        # abi-compliance-checker exits 0 for compatible, 1 for incompatible;
        # anything else means the tool itself failed
        if r not in (0, 1):
            self.logger.error('abi-compliance-checker failed')
            # XXX: record error
            return None
        return r == 0

    def run_abi_dumper(self, output, base, filename):
        cmd = ['abi-dumper',
            '-o', output,
            '-lver', os.path.basename(filename),
            '-debuginfo-dir', '%s/usr/lib/debug/%s'%(base, os.path.dirname(filename)),
            '/'.join([base, filename])]
        self.logger.debug(cmd)
        r = subprocess.call(cmd, close_fds=True)
        if r != 0:
            self.logger.error("failed to dump %s!"%filename)
            # XXX: record error
            return False
        return True

    def extract(self, project, package, vmd5, repo, arch):
        # fetch cpio headers
        # check file lists for library packages
        fetchlist, liblist = self.compute_fetchlist(project, package, vmd5, repo, arch)

        if not fetchlist:
            self.logger.info("nothing to fetch for %s/%s %s/%s"%(project, package, repo, arch))
            # XXX record
            return None

        # mtimes in cpio are not the original ones, so we need to fetch
        # that separately :-(
        mtimes = self._getmtimes(project, package, repo, arch)

        self.logger.debug("fetchlist %s", pformat(fetchlist))
        self.logger.debug("liblist %s", pformat(liblist))

        debugfiles = set(['/usr/lib/debug%s.debug'%f for f in liblist])

        # fetch binary rpms
        downloaded = self.download_files(project, package, repo, arch, fetchlist, mtimes)

        # extract binary rpms
        tmpfile = os.path.join(CACHEDIR, "cpio")
        for fn in fetchlist:
            self.logger.debug("extract %s"%fn)
            with open(tmpfile, 'wb') as tmpfd:
                if fn not in downloaded:
                    self.logger.error("%s was not downloaded!"%fn)
                    # XXX: record error
                    continue
                self.logger.debug(downloaded[fn])
                r = subprocess.call(['rpm2cpio', downloaded[fn]], stdout=tmpfd, close_fds=True)
                if r != 0:
                    self.logger.error("failed to extract %s!"%fn)
                    # XXX: record error
                    continue
            cpio = CpioRead(tmpfile)
            cpio.read()
            for ch in cpio:
                fn = ch.filename
                if fn.startswith('./'): # rpm payload is relative
                    fn = fn[1:]
                self.logger.debug("cpio fn %s", fn)
                if fn not in liblist and fn not in debugfiles:
                    continue
                dst = os.path.join(UNPACKDIR, project, package, repo, arch)
                dst += fn
                if not os.path.exists(os.path.dirname(dst)):
                    os.makedirs(os.path.dirname(dst))
                self.logger.debug("dst %s", dst)
                # the filehandle in the cpio archive is private so
                # open it again
                with open(tmpfile, 'rb') as cpiofh:
                    cpiofh.seek(ch.dataoff, os.SEEK_SET)
                    with open(dst, 'wb') as fh:
                        # copy only this member's payload, not everything up
                        # to the end of the archive
                        remaining = ch.filesize
                        while remaining > 0:
                            buf = cpiofh.read(min(4096, remaining))
                            if not buf:
                                break
                            fh.write(buf)
                            remaining -= len(buf)
        os.unlink(tmpfile)

        return liblist

    def download_files(self, project, package, repo, arch, filenames, mtimes):
        downloaded = dict()
        for fn in filenames:
            if fn not in mtimes:
                self.logger.error("missing mtime information for %s, can't check"%fn)
                # XXX record error
                continue
            repodir = os.path.join(DOWNLOADS, package, project, repo)
            if not os.path.exists(repodir):
                os.makedirs(repodir)
            t = os.path.join(repodir, fn)
            self._get_binary_file(project, repo, arch, package, fn, t, mtimes[fn])
            downloaded[fn] = t
        return downloaded

    # XXX: from repochecker
    def _get_binary_file(self, project, repository, arch, package, filename, target, mtime):
        """Get a binary file from OBS."""
        # Check if the file is already there.
        key = (project, repository, arch, package, filename, mtime)
        if key in self.pkgcache:
            try:
                os.unlink(target)
            except OSError:
                pass
            self.pkgcache.linkto(key, target)
        else:
            osc.core.get_binary_file(self.apiurl, project, repository, arch,
                                     filename, package=package,
                                     target_filename=target)
            self.pkgcache[key] = target

    def readRpmHeaderFD(self, fd):
        h = None
        try:
            h = self.ts.hdrFromFdno(fd)
        except rpm.error, e:
            if str(e) == "public key not available":
                print str(e)
            if str(e) == "public key not trusted":
                print str(e)
            if str(e) == "error reading package header":
                print str(e)
            h = None
        return h

    def _fetchcpioheaders(self, project, package, repo, arch):
        u = osc.core.makeurl(self.apiurl, [ 'build', project, repo, arch, package ],
            [ 'view=cpioheaders' ])
        try:
            r = osc.core.http_GET(u)
        except urllib2.HTTPError, e:
            self.logger.error('failed to fetch header information')
            raise StopIteration
        tmpfile = NamedTemporaryFile(prefix="cpio-", delete=False)
        for chunk in r:
            tmpfile.write(chunk)
        tmpfile.close()
        cpio = CpioRead(tmpfile.name)
        cpio.read()
        rpm_re = re.compile(r'(.+\.rpm)-[0-9A-Fa-f]{32}$')
        for ch in cpio:
            # ignore errors
            if ch.filename == '.errors':
                continue
            # the filehandle in the cpio archive is private so
            # open it again
            with open(tmpfile.name, 'rb') as fh:
                fh.seek(ch.dataoff, os.SEEK_SET)
                h = self.readRpmHeaderFD(fh)
                if h is None:
                    self.logger.warn("failed to read rpm header for %s"%ch.filename)
                    continue
                m = rpm_re.match(ch.filename)
                if m:
                    yield m.group(1), h
        os.unlink(tmpfile.name)

    def _getmtimes(self, prj, pkg, repo, arch):
        """ returns a dict of filename: mtime """
        url = osc.core.makeurl(self.apiurl, ('build', prj, repo, arch, pkg))
        try:
            root = ET.parse(osc.core.http_GET(url)).getroot()
        except urllib2.HTTPError:
            return None

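        # the binarylist has one <binary> node per file; an illustrative
        # (made-up) result: {'libfoo1-1.0-1.1.x86_64.rpm': '1427204471'}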
        return dict([(node.attrib['filename'], node.attrib['mtime']) for node in root.findall('binary')])

    def findrepos(self, src_project, dst_project):
        url = osc.core.makeurl(self.apiurl, ('source', dst_project, '_meta'))
        try:
            root = ET.parse(osc.core.http_GET(url)).getroot()
        except urllib2.HTTPError:
            return None

        # build list of target repos as set of name, arch
        dstrepos = set()
        for repo in root.findall('repository'):
            name = repo.attrib['name']
            for node in repo.findall('arch'):
                dstrepos.add((name, node.text))

        url = osc.core.makeurl(self.apiurl, ('source', src_project, '_meta'))
        try:
            root = ET.parse(osc.core.http_GET(url)).getroot()
        except urllib2.HTTPError:
            return None

        MR = namedtuple('MatchRepo', ('srcrepo', 'dstrepo', 'arch'))
        # set of source repo name, target repo name, arch
        matchrepos = set()
        for repo in root.findall('repository'):
            name = repo.attrib['name']
            # only consider repos that build against exactly one path
            path = repo.findall('path')
            if len(path) != 1:
                continue
            prj = path[0].attrib['project']
            if prj == 'openSUSE:Tumbleweed':
                prj = 'openSUSE:Factory' # XXX: hack
            if prj != dst_project:
                continue
            for node in repo.findall('arch'):
                arch = node.text
                dstname = path[0].attrib['repository']
                if (dstname, arch) in dstrepos:
                    matchrepos.add(MR(name, dstname, arch))

        return matchrepos

    def disturl_matches(self, disturl, prj, pkg, repo, vmd5):
        self.logger.debug("%s %s"%(disturl, vmd5))
        m = disturl_re.match(disturl)
        self.logger.debug(m)
        if m is None \
            or m.group('prj') != prj \
            or m.group('pkg') != pkg \
            or m.group('repo') != repo \
            or m.group('md5') != vmd5:
            self.logger.warning('disturl mismatch %s vs ...%s/%s/%s-%s'%(disturl, prj, repo, vmd5, pkg))
            return False
        return True

    def compute_fetchlist(self, prj, pkg, vmd5, repo, arch):
        """ Scan binary rpms of the specified repo for libraries.
        Returns a set of packages to fetch and the libraries found.
        """
        self.logger.debug('scanning %s/%s %s/%s'%(prj, pkg, repo, arch))

        headers = self._fetchcpioheaders(prj, pkg, repo, arch)
        missing_debuginfo = set()
        lib_packages = dict() # pkgname -> set(lib file names)
        pkgs = dict() # pkgname -> (rpm file name, rpm header)
        lib_aliases = dict()
        for rpmfn, h in headers:
            # skip src rpm
            if h['sourcepackage']:
                continue
            pkgname = h['name']
            self.logger.debug(pkgname)
            if not self.disturl_matches(h['disturl'], prj, pkg, repo, vmd5):
                raise DistUrlMismatch(h['disturl'], prj, pkg, repo, vmd5)
            pkgs[pkgname] = (rpmfn, h)
            if debugpkg_re.match(pkgname):
                continue
            for fn, mode, lnk in zip(h['filenames'], h['filemodes'], h['filelinktos']):
                if so_re.match(fn):
                    if S_ISREG(mode):
                        self.logger.debug('found lib: %s'%fn)
                        lib_packages.setdefault(pkgname, set()).add(fn)
                    elif S_ISLNK(mode) and lnk is not None:
                        alias = os.path.basename(fn)
                        libname = os.path.basename(lnk)
                        self.logger.debug('found alias: %s -> %s'%(alias, libname))
                        lib_aliases.setdefault(libname, set()).add(alias)

        fetchlist = set()
        liblist = dict()
        # check whether debug info exists for each lib
        for pkgname in sorted(lib_packages.keys()):
            # 32bit debug packages have special names
            if pkgname.endswith('-32bit'):
                dpkgname = pkgname[:-len('-32bit')]+'-debuginfo-32bit'
            else:
                dpkgname = pkgname+'-debuginfo'
            if dpkgname not in pkgs:
                missing_debuginfo.add((prj, pkg, repo, arch, pkgname))
                continue

            # check file list of debuginfo package
            rpmfn, h = pkgs[dpkgname]
            files = set(h['filenames'])
            ok = True
            for lib in lib_packages[pkgname]:
                fn = '/usr/lib/debug%s.debug'%lib
                if fn not in files:
                    missing_debuginfo.add((prj, pkg, repo, arch, pkgname, lib))
                    ok = False
                if ok:
                    fetchlist.add(pkgs[pkgname][0])
                    fetchlist.add(rpmfn)
                    liblist.setdefault(lib, set())
                    libname = os.path.basename(lib)
                    if libname in lib_aliases:
                        liblist[lib] |= lib_aliases[libname]

        if missing_debuginfo:
            self.logger.error('missing debuginfo: %s'%pformat(missing_debuginfo))
            # callers unpack two values, so return a pair here as well
            return None, None

        return fetchlist, liblist

class CommandLineInterface(ReviewBot.CommandLineInterface):

    def __init__(self, *args, **kwargs):
        ReviewBot.CommandLineInterface.__init__(self, *args, **kwargs)

    def setup_checker(self):

        apiurl = osc.conf.config['apiurl']
        if apiurl is None:
            raise osc.oscerr.ConfigError("missing apiurl")
        user = self.options.user
        if user is None:
            user = osc.conf.get_apiurl_usr(apiurl)

        return ABIChecker(apiurl = apiurl,
                          dryrun = self.options.dry,
                          user = user,
                          logger = self.logger)

    @cmdln.option('-r', '--revision', metavar="number", type="int", help="revision number")
    def do_diff(self, subcmd, opts, src_project, src_package, dst_project, dst_package):
        src_rev = opts.revision
        print self.checker.check_source_submission(src_project, src_package, src_rev, dst_project, dst_package)

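# Example invocation (illustrative; the project and package names are made
# up, and 'diff' is the subcommand cmdln derives from do_diff above):
#   ./abichecker.py diff -r 7 home:user:branches:openSUSE:Factory libfoo openSUSE:Factory libfoo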
if __name__ == "__main__":
    app = CommandLineInterface()
    sys.exit(app.main())

# vim: sw=4 et