1
0
mirror of https://github.com/openSUSE/osc.git synced 2024-11-09 22:36:14 +01:00

fix downloading from mirrors

Some of our repositories have specific download URLs. So far osc has been
ignoring these and just tried to use the generic download URL.

This code prefers definitions for individual path elements if they exist.
We could IMHO remove the old code, since old OBS instances would still
work via the api download fallback.

Real-life examples of repo-specific configs on openSUSE are all
repositories outside of the /repositories/ directory, e.g.:

  <path project="openSUSE:Tumbleweed" repository="dod" url="http://download.opensuse.org/tumbleweed/repo/oss/"/>

Co-authored-by: Marcus Hüwe <suse-tux@gmx.de>
This commit is contained in:
Adrian Schröter 2021-09-16 16:56:38 +02:00
parent 5fdcecfbab
commit 3b90480dfc
No known key found for this signature in database
GPG Key ID: 918D8C954C08DB67
2 changed files with 22 additions and 4 deletions

View File

@ -157,6 +157,7 @@ class Buildinfo:
self.keys = [] self.keys = []
self.prjkeys = [] self.prjkeys = []
self.pathes = [] self.pathes = []
self.urls = {}
self.modules = [] self.modules = []
for node in root.findall('module'): for node in root.findall('module'):
self.modules.append(node.text) self.modules.append(node.text)
@ -174,7 +175,12 @@ class Buildinfo:
self.projects[p.project] = 1 self.projects[p.project] = 1
self.deps.append(p) self.deps.append(p)
for node in root.findall('path'): for node in root.findall('path'):
# old simple list for compatibility
# XXX: really old? This is currently used for kiwi builds
self.pathes.append(node.get('project')+"/"+node.get('repository')) self.pathes.append(node.get('project')+"/"+node.get('repository'))
# a hash providing the matching URL for specific repos for newer OBS instances
if node.get('url'):
self.urls[node.get('project')+"/"+node.get('repository')] = node.get('url') + '/%(arch)s/%(filename)s'
self.vminstall_list = [ dep.name for dep in self.deps if dep.vminstall ] self.vminstall_list = [ dep.name for dep in self.deps if dep.vminstall ]
self.preinstall_list = [ dep.name for dep in self.deps if dep.preinstall ] self.preinstall_list = [ dep.name for dep in self.deps if dep.preinstall ]
@ -1024,7 +1030,9 @@ def main(apiurl, opts, argv):
else: else:
urllist = config['urllist'] urllist = config['urllist']
# OBS 1.5 and before has no downloadurl defined in buildinfo # OBS 1.5 and before has no downloadurl defined in buildinfo, but it is obsolete again meanwhile.
# we have now specific download repositories per repository. Could be removed IMHO, since the api fallback
# is there. In worst case it could fetch the wrong rpm...
if bi.downloadurl: if bi.downloadurl:
urllist.append(bi.downloadurl + '/%(extproject)s/%(extrepository)s/%(arch)s/%(filename)s') urllist.append(bi.downloadurl + '/%(extproject)s/%(extrepository)s/%(arch)s/%(filename)s')
if opts.disable_cpio_bulk_download: if opts.disable_cpio_bulk_download:

View File

@ -36,6 +36,7 @@ class Fetcher:
self.progress_obj = create_text_meter(use_pb_fallback=False) self.progress_obj = create_text_meter(use_pb_fallback=False)
self.cachedir = cachedir self.cachedir = cachedir
# generic download URL lists
self.urllist = urllist self.urllist = urllist
self.modules = modules self.modules = modules
self.http_debug = http_debug self.http_debug = http_debug
@ -193,17 +194,26 @@ class Fetcher:
print(e, file=sys.stderr) print(e, file=sys.stderr)
sys.exit(1) sys.exit(1)
def _build_urllist(self, buildinfo, pac):
urllist = self.urllist
key = '%s/%s' % (pac.project, pac.repository)
project_repo_url = buildinfo.urls.get(key)
if project_repo_url is not None:
urllist = [project_repo_url]
return urllist
def run(self, buildinfo): def run(self, buildinfo):
cached = 0 cached = 0
all = len(buildinfo.deps) all = len(buildinfo.deps)
for i in buildinfo.deps: for i in buildinfo.deps:
i.makeurls(self.cachedir, self.urllist) urllist = self._build_urllist(buildinfo, i)
i.makeurls(self.cachedir, urllist)
# find container extension by looking in the cache # find container extension by looking in the cache
if i.name.startswith('container:') and i.fullfilename.endswith('.tar.xz'): if i.name.startswith('container:') and i.fullfilename.endswith('.tar.xz'):
for ext in ['.tar.xz', '.tar.gz', '.tar']: for ext in ['.tar.xz', '.tar.gz', '.tar']:
if os.path.exists(i.fullfilename[:-7] + ext): if os.path.exists(i.fullfilename[:-7] + ext):
i.canonname = i.canonname[:-7] + ext i.canonname = i.canonname[:-7] + ext
i.makeurls(self.cachedir, self.urllist) i.makeurls(self.cachedir, urllist)
if os.path.exists(i.fullfilename): if os.path.exists(i.fullfilename):
cached += 1 cached += 1
@ -216,6 +226,7 @@ class Fetcher:
if not hdrmd5 or hdrmd5 != i.hdrmd5: if not hdrmd5 or hdrmd5 != i.hdrmd5:
os.unlink(i.fullfilename) os.unlink(i.fullfilename)
cached -= 1 cached -= 1
miss = 0 miss = 0
needed = all - cached needed = all - cached
if all: if all:
@ -223,7 +234,6 @@ class Fetcher:
print("%.1f%% cache miss. %d/%d dependencies cached.\n" % (miss, cached, all)) print("%.1f%% cache miss. %d/%d dependencies cached.\n" % (miss, cached, all))
done = 1 done = 1
for i in buildinfo.deps: for i in buildinfo.deps:
i.makeurls(self.cachedir, self.urllist)
if not os.path.exists(i.fullfilename): if not os.path.exists(i.fullfilename):
if self.offline: if self.offline:
raise oscerr.OscIOError(None, raise oscerr.OscIOError(None,