# Copyright (C) 2006 Novell Inc. All rights reserved.
# This program is free software; it may be used, copied, modified
# and distributed under the terms of the GNU General Public Licence,
# either version 2, or (at your option) any later version.

import sys, os
import urllib2
from urlgrabber.grabber import URLGrabError
from urlgrabber.mirror import MirrorGroup
from core import makeurl, streamfile
from util import packagequery, cpio
import conf
import oscerr
import tempfile
import re

try:
    from meter import TextMeter
except ImportError:
    TextMeter = None


def join_url(self, base_url, rel_url):
    """Override MirrorGroup._join_url: we pass full URLs around instead of
    a base URL to which a relative URL is joined later.
    In other words, make MirrorGroup ignore rel_url."""
    return base_url


class OscFileGrabber:
    def __init__(self, progress_obj=None):
        self.progress_obj = progress_obj

    def urlgrab(self, url, filename, text=None, **kwargs):
        if url.startswith('file://'):
            file = url.replace('file://', '', 1)
            if os.path.isfile(file):
                return file
            else:
                raise URLGrabError(2, 'Local file \'%s\' does not exist' % file)
        f = open(filename, 'wb')
        try:
            try:
                for i in streamfile(url, progress_obj=self.progress_obj,
                                    text=text):
                    f.write(i)
            except urllib2.HTTPError, e:
                exc = URLGrabError(14, str(e))
                exc.url = url
                exc.exception = e
                exc.code = e.code
                raise exc
            except IOError, e:
                raise URLGrabError(4, str(e))
        finally:
            f.close()
        return filename
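
# Illustrative use of OscFileGrabber (the URL and paths are hypothetical):
#
#   gr = OscFileGrabber(progress_obj=TextMeter(fo=sys.stdout) if TextMeter else None)
#   gr.urlgrab('http://example.com/repo/foo.rpm', '/tmp/foo.rpm', text='foo.rpm')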


class Fetcher:
    def __init__(self, cachedir='/tmp', api_host_options={}, urllist=[],
                 http_debug=False, cookiejar=None, offline=False,
                 enable_cpio=True):
        # set up progress bar callback
        if sys.stdout.isatty() and TextMeter:
            self.progress_obj = TextMeter(fo=sys.stdout)
        else:
            self.progress_obj = None

        self.cachedir = cachedir
        self.urllist = urllist
        self.http_debug = http_debug
        self.offline = offline
        self.cpio = {}
        self.enable_cpio = enable_cpio

        passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
        for host in api_host_options.keys():
            passmgr.add_password(None, host, api_host_options[host]['user'],
                                 api_host_options[host]['pass'])
        openers = (urllib2.HTTPBasicAuthHandler(passmgr), )
        if cookiejar:
            openers += (urllib2.HTTPCookieProcessor(cookiejar), )
        self.gr = OscFileGrabber(progress_obj=self.progress_obj)

    def failureReport(self, errobj):
        """failure output for failovers from urlgrabber"""
        if errobj.url.startswith('file://'):
            return {}
        print 'Trying openSUSE Build Service server for %s (%s), not found at %s.' \
            % (self.curpac, self.curpac.project, errobj.url.split('/')[2])
        return {}

    def __add_cpio(self, pac):
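        # group binaries per project/repo/arch/package so that __fetch_cpio()
        # can retrieve each group with a single request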
        prpap = '%s/%s/%s/%s' % (pac.project, pac.repository, pac.repoarch,
                                 pac.repopackage)
        self.cpio.setdefault(prpap, {})[pac.repofilename] = pac

    def __fetch_cpio(self, apiurl):
        from urllib import quote_plus
        for prpap, pkgs in self.cpio.iteritems():
            project, repo, arch, package = prpap.split('/', 3)
            query = ['binary=%s' % quote_plus(i) for i in pkgs.keys()]
            query.append('view=cpio')
            tmparchive = tmpfile = None
            try:
                (fd, tmparchive) = tempfile.mkstemp(prefix='osc_build_cpio')
                (fd, tmpfile) = tempfile.mkstemp(prefix='osc_build')
                url = makeurl(apiurl, ['build', project, repo, arch, package],
                              query=query)
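                # the URL built above should look like (illustrative):
                #   <apiurl>/build/<prj>/<repo>/<arch>/<pkg>?binary=a&binary=b&view=cpio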
                sys.stdout.write("preparing download ...\r")
                sys.stdout.flush()
                self.gr.urlgrab(url, filename=tmparchive,
                                text='fetching packages for \'%s\'' % project)
                archive = cpio.CpioRead(tmparchive)
                archive.read()
                for hdr in archive:
                    # XXX: we won't have an .errors file because we're using
                    # getbinarylist instead of the public/... route (which is
                    # routed to getbinaries (but that won't work for kiwi
                    # products))
                    if hdr.filename == '.errors':
                        archive.copyin_file(hdr.filename)
                        raise oscerr.APIError('CPIO archive is incomplete '
                                              '(see .errors file)')
                    if package == '_repository':
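                        # presumably Arch Linux binaries ('*.pkg.tar.gz',
                        # '*.pkg.tar.xz'); rewrite the name so the trailing
                        # extension can be stripped off below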
                        n = re.sub(r'\.pkg\.tar\..z$', '.arch', hdr.filename)
                        pac = pkgs[n.rsplit('.', 1)[0]]
                    else:
                        # this is a kiwi product
                        pac = pkgs[hdr.filename]
                    archive.copyin_file(hdr.filename,
                                        os.path.dirname(tmpfile),
                                        os.path.basename(tmpfile))
                    self.move_package(tmpfile, pac.localdir, pac)
                # check if we got all packages... (because we've no .errors
                # file)
                for pac in pkgs.itervalues():
                    if not os.path.isfile(pac.fullfilename):
                        raise oscerr.APIError('failed to fetch file \'%s\': '
                                              'does not exist in CPIO archive'
                                              % pac.repofilename)
            finally:
                if tmparchive is not None and os.path.exists(tmparchive):
                    os.unlink(tmparchive)
                if tmpfile is not None and os.path.exists(tmpfile):
                    os.unlink(tmpfile)

    def fetch(self, pac, prefix=''):
        # for use by the failure callback
        self.curpac = pac

        MirrorGroup._join_url = join_url
        mg = MirrorGroup(self.gr, pac.urllist,
                         failure_callback=(self.failureReport, (), {}))

        if self.http_debug:
            print >>sys.stderr, '\nURLs to try for package \'%s\':' % pac
            print >>sys.stderr, '\n'.join(pac.urllist)
            print >>sys.stderr

        (fd, tmpfile) = tempfile.mkstemp(prefix='osc_build')
        try:
            try:
                mg.urlgrab(pac.filename,
                           filename=tmpfile,
                           text='%s(%s) %s' % (prefix, pac.project, pac.filename))
                self.move_package(tmpfile, pac.localdir, pac)
            except URLGrabError, e:
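                # errno 256 is urlgrabber's 'no more mirrors to try'; queue
                # the package for the cpio fallback instead of failing hard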
                if self.enable_cpio and e.errno == 256:
                    self.__add_cpio(pac)
                    return
                print
                print >>sys.stderr, 'Error:', e.strerror
                print >>sys.stderr, 'Failed to retrieve %s from the ' \
                    'following locations (in order):' % pac.filename
                print >>sys.stderr, '\n'.join(pac.urllist)
                sys.exit(1)
        finally:
            os.close(fd)
            if os.path.exists(tmpfile):
                os.unlink(tmpfile)

    def move_package(self, tmpfile, destdir, pac_obj=None):
        import shutil
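        # extra_rpmtags 1044, 1051 and 1052 should be rpm's SOURCERPM,
        # NOSOURCE and NOPATCH tags; canonname() needs them to tell src
        # from nosrc packages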
        pkgq = packagequery.PackageQuery.query(tmpfile,
                                               extra_rpmtags=(1044, 1051, 1052))
        if pkgq:
            canonname = pkgq.canonname()
        else:
            if pac_obj is None:
                print >>sys.stderr, 'Unsupported file type:', tmpfile
                sys.exit(1)
            canonname = pac_obj.binary

        fullfilename = os.path.join(destdir, canonname)
        if pac_obj is not None:
            pac_obj.filename = canonname
            pac_obj.fullfilename = fullfilename
        shutil.move(tmpfile, fullfilename)
        os.chmod(fullfilename, 0644)

    def dirSetup(self, pac):
        dir = os.path.join(self.cachedir, pac.localdir)
        if not os.path.exists(dir):
            try:
                os.makedirs(dir, mode=0755)
            except OSError, e:
                print >>sys.stderr, 'packagecachedir is not writable for you?'
                print >>sys.stderr, e
                sys.exit(1)

    def run(self, buildinfo):
        cached = 0
        all = len(buildinfo.deps)
        for i in buildinfo.deps:
            i.makeurls(self.cachedir, self.urllist)
            if os.path.exists(i.fullfilename):
                cached += 1
        miss = 0
        needed = all - cached
        if all:
            miss = 100.0 * needed / all
        print "%.1f%% cache miss. %d/%d dependencies cached.\n" % (miss, cached, all)
        done = 1
        for i in buildinfo.deps:
            i.makeurls(self.cachedir, self.urllist)
            if not os.path.exists(i.fullfilename):
                if self.offline:
                    raise oscerr.OscIOError(None,
                        'Missing package \'%s\' in cache: --offline not possible.'
                        % i.fullfilename)
                self.dirSetup(i)
                try:
                    # if there isn't a progress bar, there is no output at all
                    if not self.progress_obj:
                        print '%d/%d (%s) %s' % (done, needed, i.project, i.filename)
                    self.fetch(i)
                    if self.progress_obj:
                        print " %d/%d\r" % (done, needed),
                        sys.stdout.flush()
                except KeyboardInterrupt:
                    print 'Cancelled by user (ctrl-c)'
                    print 'Exiting.'
                    sys.exit(0)
            done += 1

        self.__fetch_cpio(buildinfo.apiurl)

        prjs = buildinfo.projects.keys()
        for i in prjs:
            dest = "%s/%s" % (self.cachedir, i)
            if not os.path.exists(dest):
                os.makedirs(dest, mode=0755)
            dest += '/_pubkey'

            url = makeurl(buildinfo.apiurl, ['source', i, '_pubkey'])
            try:
                if self.offline and not os.path.exists(dest):
                    # may need to try parent
                    raise URLGrabError(2)
                elif not self.offline:
                    OscFileGrabber().urlgrab(url, dest)
                if i not in buildinfo.prjkeys:  # not that many keys usually
                    buildinfo.keys.append(dest)
                    buildinfo.prjkeys.append(i)
            except KeyboardInterrupt:
                print 'Cancelled by user (ctrl-c)'
                print 'Exiting.'
                if os.path.exists(dest):
                    os.unlink(dest)
                sys.exit(0)
            except URLGrabError, e:
                if self.http_debug:
                    print >>sys.stderr, "can't fetch key for %s: %s" % (i, e.strerror)
                    print >>sys.stderr, "url: %s" % url

                if os.path.exists(dest):
                    os.unlink(dest)

                l = i.rsplit(':', 1)
                # try key from parent project
                if len(l) > 1 and l[1] and l[0] not in buildinfo.projects:
                    prjs.append(l[0])
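

# Illustrative use of Fetcher (values are hypothetical; 'osc build' wires
# this up from its configuration):
#
#   fetcher = Fetcher(cachedir='/var/tmp/osbuild-packagecache',
#                     urllist=['http://example.com/%(project)s/%(repository)s/'
#                              '%(arch)s/%(filename)s'],
#                     offline=False)
#   fetcher.run(buildinfo)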


def verify_pacs_old(pac_list):
    """Take a list of rpm filenames and run 'rpm -K' on them.

    In case of failure, exit.

    Check all packages in one go, since this takes only 6 seconds on my
    Athlon 700 instead of 20 when calling 'rpm -K' for each of them.
    """
    import subprocess

    if not pac_list:
        return

    # don't care about the return value because we check the
    # output anyway, and rpm always writes to stdout.

    # save locale first (we rely on English rpm output here)
    saved_LC_ALL = os.environ.get('LC_ALL')
    os.environ['LC_ALL'] = 'en_EN'

    o = subprocess.Popen(['rpm', '-K'] + pac_list, stdout=subprocess.PIPE,
                         stderr=subprocess.STDOUT, close_fds=True).stdout

    # restore locale
    if saved_LC_ALL:
        os.environ['LC_ALL'] = saved_LC_ALL
    else:
        os.environ.pop('LC_ALL')
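
    # Typical 'rpm -K' output lines the loop below inspects (the exact format
    # varies between rpm versions; these samples are illustrative):
    #   /tmp/foo-1.0-1.x86_64.rpm: rsa sha1 (md5) pgp md5 OK
    #   /tmp/bar-1.0-1.x86_64.rpm: RSA sha1 MD5 NOT OK (MISSING KEYS: GPG#1a2b3c4d)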
    for line in o.readlines():

        if not 'OK' in line:
            print
            print >>sys.stderr, 'The following package could not be verified:'
            print >>sys.stderr, line
            sys.exit(1)

        if 'NOT OK' in line:
            print
            print >>sys.stderr, 'The following package could not be verified:'
            print >>sys.stderr, line

            if 'MISSING KEYS' in line:
                missing_key = line.split('#')[-1].split(')')[0]

                print >>sys.stderr, """
- If the key (%(name)s) is missing, install it first.
  For example, do the following:
    osc signkey PROJECT > keyfile-%(name)s
  and, as root:
    rpm --import %(dir)s/keyfile-%(name)s

  Then, just start the build again.

- If you do not trust the packages, you should configure 'osc build' for XEN
  or KVM.

- You may use --no-verify to skip the verification (which is a risk for your
  system).
""" % {'name': missing_key,
       'dir': os.path.expanduser('~')}

            else:
                print >>sys.stderr, """
- If the signature is wrong, you may try deleting the package manually
  and re-run this program, so it is fetched again.
"""

            sys.exit(1)


def verify_pacs(bi):
    """Take a buildinfo object, collect the full filenames of its
    dependencies and verify their signatures.

    In case of failure, exit.
    """

    pac_list = [i.fullfilename for i in bi.deps]
    if not conf.config['builtin_signature_check']:
        return verify_pacs_old(pac_list)

    if not pac_list:
        return

    if not bi.keys:
        raise oscerr.APIError("can't verify packages due to lack of GPG keys")

    print "using keys from", ', '.join(bi.prjkeys)

    import checker
    failed = False
    checker = checker.Checker()
    try:
        checker.readkeys(bi.keys)
        for pkg in pac_list:
            try:
                checker.check(pkg)
            except Exception, e:
                failed = True
                print pkg, ':', e
    except:
        checker.cleanup()
        raise

    if failed:
        checker.cleanup()
        sys.exit(1)

    checker.cleanup()
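

# Hedged sketch of how fetching and verification fit together (call order as
# in 'osc build'; the config keys and option names are assumptions):
#
#   fetcher = Fetcher(cachedir=config['packagecachedir'],
#                     urllist=config['urllist'],
#                     offline=opts.offline)
#   fetcher.run(bi)    # fills the cache and collects the projects' GPG keys
#   verify_pacs(bi)    # exits with an error if any package fails to verify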

# vim: sw=4 et