# Copyright (C) 2006 Novell Inc. All rights reserved.
# This program is free software; it may be used, copied, modified
# and distributed under the terms of the GNU General Public Licence,
# either version 2, or (at your option) any later version.

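"""Fetch build dependencies into the package cache and retrieve project GPG keys.

This module implements the ``Fetcher`` used by ``osc build`` as well as the
package signature verification helpers.
"""
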
import glob
import os
import re
import shutil
import subprocess
import sys
import tempfile
from urllib.error import HTTPError

from . import checker as osc_checker
from . import conf
from . import oscerr
from .core import makeurl, dgst
from .grabber import OscFileGrabber, OscMirrorGroup
from .meter import create_text_meter
from .util import packagequery, cpio
from .util.helper import decode_it


class Fetcher:
    def __init__(self, cachedir='/tmp', urllist=None,
                 http_debug=False, cookiejar=None, offline=False,
                 enable_cpio=True, modules=None, download_api_only=False):
        # set up progress bar callback
        self.progress_obj = None
        if sys.stdout.isatty():
            self.progress_obj = create_text_meter(use_pb_fallback=False)

        self.cachedir = cachedir
        # generic download URL lists
        self.urllist = urllist or []
        self.modules = modules or []
        self.http_debug = http_debug
        self.offline = offline
        self.cpio = {}
        self.enable_cpio = enable_cpio
        self.download_api_only = download_api_only

        self.gr = OscFileGrabber(progress_obj=self.progress_obj)

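    # Queue a binary for download via the API's cpio view, keyed by
    # project/repository/arch/package so that one archive request can
    # cover all files from the same location.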
    def __add_cpio(self, pac):
        prpap = f'{pac.project}/{pac.repository}/{pac.repoarch}/{pac.repopackage}'
        self.cpio.setdefault(prpap, {})[pac.repofilename] = pac

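    # Download the queued binaries of one project/repo/arch/package group
    # as a single cpio archive from the build API and unpack it into the
    # package cache. A 414 response (query string too long) is handled by
    # recursively splitting the request in half.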
    def __download_cpio_archive(self, apiurl, project, repo, arch, package, **pkgs):
        if not pkgs:
            return
        query = {}
        query["binary"] = pkgs
        query["view"] = "cpio"
        query["module"] = self.modules
        try:
            url = makeurl(apiurl, ['build', project, repo, arch, package], query=query)
            sys.stdout.write("preparing download ...\r")
            sys.stdout.flush()
            with tempfile.NamedTemporaryFile(prefix='osc_build_cpio') as tmparchive:
                self.gr.urlgrab(url, filename=tmparchive.name,
                                text=f'fetching packages for \'{project}\'')
                archive = cpio.CpioRead(tmparchive.name)
                archive.read()
                for hdr in archive:
                    # XXX: we won't have an .errors file because we're using
                    # getbinarylist instead of the public/... route
                    # (which is routed to getbinaries)
                    # getbinaries does not support kiwi builds
                    if hdr.filename == b'.errors':
                        archive.copyin_file(hdr.filename)
                        raise oscerr.APIError('CPIO archive is incomplete '
                                              '(see .errors file)')
                    if package == '_repository':
                        n = re.sub(br'\.pkg\.tar\.(zst|.z)$', b'.arch', hdr.filename)
                        if n.startswith(b'container:'):
                            n = re.sub(br'\.tar\.(zst|.z)$', b'.tar', hdr.filename)
                            pac = pkgs[decode_it(n.rsplit(b'.', 1)[0])]
                            pac.canonname = hdr.filename
                        else:
                            pac = pkgs[decode_it(n.rsplit(b'.', 1)[0])]
                    else:
                        # this is a kiwi product
                        pac = pkgs[decode_it(hdr.filename)]

                    # Extract a single file from the cpio archive
                    fd = None
                    tmpfile = None
                    try:
                        fd, tmpfile = tempfile.mkstemp(prefix='osc_build_file')
                        archive.copyin_file(hdr.filename,
                                            decode_it(os.path.dirname(tmpfile)),
                                            decode_it(os.path.basename(tmpfile)))
                        self.move_package(tmpfile, pac.localdir, pac)
                    finally:
                        if fd is not None:
                            os.close(fd)
                        if tmpfile is not None and os.path.exists(tmpfile):
                            os.unlink(tmpfile)

                for pac in pkgs.values():
                    if not os.path.isfile(pac.fullfilename):
                        raise oscerr.APIError('failed to fetch file \'%s\': '
                                              'missing in CPIO archive' %
                                              pac.repofilename)
        except HTTPError as e:
            if e.code != 414:
                raise
            # the query string was too large (hence the 414): split the
            # request in half and retry
            keys = list(pkgs.keys())
            if len(keys) == 1:
                raise oscerr.APIError('unable to fetch cpio archive: '
                                      'server always returns code 414')
            n = len(pkgs) // 2
            new_pkgs = {k: pkgs[k] for k in keys[:n]}
            self.__download_cpio_archive(apiurl, project, repo, arch,
                                         package, **new_pkgs)
            new_pkgs = {k: pkgs[k] for k in keys[n:]}
            self.__download_cpio_archive(apiurl, project, repo, arch,
                                         package, **new_pkgs)

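    # Flush the cpio download queue, one archive per
    # project/repo/arch/package group.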
    def __fetch_cpio(self, apiurl):
        for prpap, pkgs in self.cpio.items():
            project, repo, arch, package = prpap.split('/', 3)
            self.__download_cpio_archive(apiurl, project, repo, arch, package, **pkgs)

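    # Fetch a single package, trying the mirror URLs in order. On failure,
    # queue an API (cpio) download if enabled, otherwise exit with an error.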
    def fetch(self, pac, prefix=''):
        # for use by the failure callback
        self.curpac = pac

        mg = OscMirrorGroup(self.gr, pac.urllist)

        if self.http_debug:
            print(f'\nURLs to try for package \'{pac}\':', file=sys.stderr)
            print('\n'.join(pac.urllist), file=sys.stderr)
            print(file=sys.stderr)

        try:
            with tempfile.NamedTemporaryFile(prefix='osc_build',
                                             delete=False) as tmpfile:
                mg_stat = mg.urlgrab(pac.filename, filename=tmpfile.name,
                                     text=f'{prefix}({pac.project}) {pac.filename}')
                if mg_stat:
                    self.move_package(tmpfile.name, pac.localdir, pac)
                else:
                    if self.enable_cpio:
                        print('%s/%s: attempting download from api, since not found'
                              % (pac.project, pac.name))
                        self.__add_cpio(pac)
                        return
                    print()
                    print('Error: Failed to retrieve %s from the following locations '
                          '(in order):' % pac.filename, file=sys.stderr)
                    print('\n'.join(pac.urllist), file=sys.stderr)
                    sys.exit(1)
        finally:
            if os.path.exists(tmpfile.name):
                os.unlink(tmpfile.name)

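    # Determine the canonical file name of a downloaded binary and move it
    # from its temporary location into the destination directory.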
    def move_package(self, tmpfile, destdir, pac_obj=None):
        canonname = None
        if pac_obj and pac_obj.name.startswith('container:'):
            canonname = pac_obj.canonname
        if canonname is None:
            pkgq = packagequery.PackageQuery.query(tmpfile, extra_rpmtags=(1044, 1051, 1052))
            if pkgq:
                canonname = pkgq.canonname()
            else:
                if pac_obj is None:
                    print('Unsupported file type:', tmpfile, file=sys.stderr)
                    sys.exit(1)
                canonname = pac_obj.binary
        # canonname may be bytes or str; checking the decoded form covers both
        decoded_canonname = decode_it(canonname)
        if '/' in decoded_canonname:
            raise oscerr.OscIOError(None, 'canonname contains a slash')

        fullfilename = os.path.join(destdir, decoded_canonname)
        if pac_obj is not None:
            pac_obj.canonname = canonname
            pac_obj.fullfilename = fullfilename
        shutil.move(tmpfile, fullfilename)
        os.chmod(fullfilename, 0o644)

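    # Create the cache directory for a package, exiting if the package
    # cache location is not writable.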
    def dirSetup(self, pac):
        dir = os.path.join(self.cachedir, pac.localdir)
        if not os.path.exists(dir):
            try:
                os.makedirs(dir, mode=0o755)
            except OSError as e:
                print('packagecachedir is not writable for you?', file=sys.stderr)
                print(e, file=sys.stderr)
                sys.exit(1)

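    # Return the download URL list for a package: empty when only the API
    # may be used, the project/repository specific URL from the buildinfo
    # if one is defined, else the generic URL list.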
    def _build_urllist(self, buildinfo, pac):
        if self.download_api_only:
            return []
        urllist = self.urllist
        key = f'{pac.project}/{pac.repository}'
        project_repo_url = buildinfo.urls.get(key)
        if project_repo_url is not None:
            urllist = [project_repo_url]
        return urllist

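    # Validate the cached dependencies of a buildinfo, fetch the missing
    # ones, and download the GPG keys of the projects involved.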
    def run(self, buildinfo):
        apiurl = buildinfo.apiurl
        cached = 0
        all = len(buildinfo.deps)
        for i in buildinfo.deps:
            urllist = self._build_urllist(buildinfo, i)
            i.makeurls(self.cachedir, urllist)
            # find container extension by looking in the cache
            if i.name.startswith('container:') and i.fullfilename.endswith('.tar.xz'):
                for ext in ['.tar.xz', '.tar.gz', '.tar']:
                    if os.path.exists(i.fullfilename[:-7] + ext):
                        i.canonname = i.canonname[:-7] + ext
                        i.makeurls(self.cachedir, urllist)

            if os.path.exists(i.fullfilename):
                cached += 1
                if not i.name.startswith('container:') and i.pacsuffix != 'rpm':
                    continue

                hdrmd5_is_valid = True
                if i.hdrmd5:
                    if i.name.startswith('container:'):
                        hdrmd5 = dgst(i.fullfilename)
                        if hdrmd5 != i.hdrmd5:
                            hdrmd5_is_valid = False
                    else:
                        hdrmd5 = packagequery.PackageQuery.queryhdrmd5(i.fullfilename)
                        if hdrmd5 != i.hdrmd5:
                            if conf.config["api_host_options"][apiurl]["disable_hdrmd5_check"]:
                                print(f"Warning: Ignoring a hdrmd5 mismatch for {i.fullfilename}: {hdrmd5} (actual) != {i.hdrmd5} (expected)")
                                hdrmd5_is_valid = True
                            else:
                                print(f"The file will be redownloaded from the API due to a hdrmd5 mismatch for {i.fullfilename}: {hdrmd5} (actual) != {i.hdrmd5} (expected)")
                                hdrmd5_is_valid = False

                if not hdrmd5_is_valid:
                    os.unlink(i.fullfilename)
                    cached -= 1

        miss = 0
        needed = all - cached
        if all:
            miss = 100.0 * needed / all
        print("%.1f%% cache miss. %d/%d dependencies cached.\n" % (miss, cached, all))
        done = 1
        for i in buildinfo.deps:
            if not os.path.exists(i.fullfilename):
                if self.offline:
                    raise oscerr.OscIOError(None,
                                            'Missing \'%s\' in cache: '
                                            '--offline not possible.' %
                                            i.fullfilename)
                self.dirSetup(i)
                try:
                    # if there isn't a progress bar, there is no output at all
                    prefix = ''
                    if not self.progress_obj:
                        print('%d/%d (%s) %s' % (done, needed, i.project, i.filename))
                    else:
                        prefix = '[%d/%d] ' % (done, needed)
                    self.fetch(i, prefix=prefix)

                    if not os.path.isfile(i.fullfilename):
                        # if the file wasn't downloaded and cannot be found on disk,
                        # mark it for downloading from the API
                        self.__add_cpio(i)
                    else:
                        hdrmd5 = packagequery.PackageQuery.queryhdrmd5(i.fullfilename)
                        if hdrmd5 != i.hdrmd5:
                            if conf.config["api_host_options"][apiurl]["disable_hdrmd5_check"]:
                                print(f"Warning: Ignoring a hdrmd5 mismatch for {i.fullfilename}: {hdrmd5} (actual) != {i.hdrmd5} (expected)")
                            else:
                                print(f"The file will be redownloaded from the API due to a hdrmd5 mismatch for {i.fullfilename}: {hdrmd5} (actual) != {i.hdrmd5} (expected)")
                                os.unlink(i.fullfilename)
                                self.__add_cpio(i)

                except KeyboardInterrupt:
                    print('Cancelled by user (ctrl-c)')
                    print('Exiting.')
                    sys.exit(0)
                done += 1

        self.__fetch_cpio(buildinfo.apiurl)

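        # fetch the GPG public keys of all projects involved in the build,
        # so the downloaded packages can be verified later on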
        prjs = list(buildinfo.projects.keys())
        for prj in prjs:
            dest = os.path.join(self.cachedir, prj)
            pubkey_path_base = os.path.join(dest, "_pubkey")
            pubkey_paths = glob.glob(f"{pubkey_path_base}*")

            if self.offline:
                # we're offline, only index the keys found on disk
                if pubkey_paths:
                    for pubkey_path in pubkey_paths:
                        buildinfo.keys.append(pubkey_path)
                    buildinfo.prjkeys.append(prj)
                continue

            from . import obs_api

            os.makedirs(dest, mode=0o755, exist_ok=True)
            pubkeys = []

            try:
                keyinfo = obs_api.Keyinfo.from_api(buildinfo.apiurl, prj)
                for pubkey in keyinfo.pubkey_list or []:
                    pubkeys.append(pubkey.value)
            except HTTPError:
                result = obs_api.Keyinfo.get_pubkey_deprecated(buildinfo.apiurl, prj, traverse=True)
                if result:
                    # overwrite ``prj`` with the project that contains the key we're using
                    prj, pubkey = result
                    pubkeys.append(pubkey)

            # remove the existing files, we'll create new files with new contents
            for pubkey_path in pubkey_paths:
                os.unlink(pubkey_path)

            if pubkeys:
                for num, pubkey in enumerate(pubkeys):
                    pubkey_path = f"{pubkey_path_base}-{num}"
                    with open(pubkey_path, "w") as f:
                        f.write(pubkey)
                    buildinfo.keys.append(pubkey_path)
                if prj not in buildinfo.prjkeys:
                    buildinfo.prjkeys.append(prj)


def verify_pacs_old(pac_list):
    """Take a list of rpm filenames and run rpm -K on them.

    In case of failure, exit.

    Check all packages in one go, since this takes only 6 seconds on my Athlon 700
    instead of 20 when calling 'rpm -K' for each of them.
    """
    if not pac_list:
        return

    # don't care about the return value because we check the
    # output anyway, and rpm always writes to stdout.

    # save locale first (we rely on English rpm output here)
    saved_LC_ALL = os.environ.get('LC_ALL')
    os.environ['LC_ALL'] = 'en_EN'

    # universal_newlines=True makes the pipe yield str lines, which the
    # substring checks below expect
    o = subprocess.Popen(['rpm', '-K'] + pac_list, stdout=subprocess.PIPE,
                         stderr=subprocess.STDOUT, close_fds=True,
                         universal_newlines=True).stdout

    # restore locale
    if saved_LC_ALL:
        os.environ['LC_ALL'] = saved_LC_ALL
    else:
        os.environ.pop('LC_ALL')

    for line in o.readlines():
        if 'OK' not in line:
            print()
            print('The following package could not be verified:', file=sys.stderr)
            print(line, file=sys.stderr)
            sys.exit(1)

        if 'NOT OK' in line:
            print()
            print('The following package could not be verified:', file=sys.stderr)
            print(line, file=sys.stderr)

            if 'MISSING KEYS' in line:
                missing_key = line.split('#')[-1].split(')')[0]

                print("""
- If the key (%(name)s) is missing, install it first.
  For example, do the following:
    osc signkey PROJECT > file
  and, as root:
    rpm --import %(dir)s/keyfile-%(name)s

  Then, just start the build again.

- If you do not trust the packages, you should configure osc build for XEN or KVM

- You may use --no-verify to skip the verification (which is a risk for your system).
""" % {'name': missing_key,
       'dir': os.path.expanduser('~')}, file=sys.stderr)

            else:
                print("""
- If the signature is wrong, you may try deleting the package manually
  and re-run this program, so it is fetched again.
""", file=sys.stderr)

            sys.exit(1)


def verify_pacs(bi):
    """Take a list of rpm filenames and verify their signatures.

    In case of failure, exit.
    """
    pac_list = [i.fullfilename for i in bi.deps]
    if conf.config['builtin_signature_check'] is not True:
        return verify_pacs_old(pac_list)

    if not pac_list:
        return

    if not bi.keys:
        raise oscerr.APIError("can't verify packages due to lack of GPG keys")

    print("using keys from", ', '.join(bi.prjkeys))

    failed = False
    checker = osc_checker.Checker()
    try:
        checker.readkeys(bi.keys)
        for pkg in pac_list:
            try:
                checker.check(pkg)
            except Exception as e:
                failed = True
                print(pkg, ':', e)
    except:
        checker.cleanup()
        raise

    if failed:
        checker.cleanup()
        sys.exit(1)

    checker.cleanup()

# vim: sw=4 et