mirror of https://github.com/openSUSE/osc.git

Merge pull request #952 from adrianschroeter/fix_download

fix downloading from mirrors
Adrian Schröter authored 2021-10-06 13:22:48 +02:00; committed by GitHub
commit 2b1c04757b
2 changed files with 22 additions and 4 deletions


@@ -157,6 +157,7 @@ class Buildinfo:
         self.keys = []
         self.prjkeys = []
         self.pathes = []
+        self.urls = {}
         self.modules = []
         for node in root.findall('module'):
             self.modules.append(node.text)
@@ -174,7 +175,12 @@ class Buildinfo:
                 self.projects[p.project] = 1
             self.deps.append(p)
         for node in root.findall('path'):
+            # old simple list for compatibility
+            # XXX: really old? This is currently used for kiwi builds
             self.pathes.append(node.get('project')+"/"+node.get('repository'))
+            # a hash providing the matching URL for specific repos for newer OBS instances
+            if node.get('url'):
+                self.urls[node.get('project')+"/"+node.get('repository')] = node.get('url') + '/%(arch)s/%(filename)s'

         self.vminstall_list = [ dep.name for dep in self.deps if dep.vminstall ]
         self.preinstall_list = [ dep.name for dep in self.deps if dep.preinstall ]
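For illustration, a minimal standalone sketch of what the new urls hash ends up holding when a buildinfo path element carries a url attribute. The XML values below are invented; only the element name, the attribute names and the '/%(arch)s/%(filename)s' suffix come from the change above.

import xml.etree.ElementTree as ET

# Hypothetical buildinfo fragment; project/repository/url values are made up.
sample = '''<buildinfo>
  <path project="openSUSE:Factory" repository="standard"
        url="http://mirror.example.org/repositories/openSUSE:/Factory/standard"/>
  <path project="home:someuser" repository="standard"/>
</buildinfo>'''

root = ET.fromstring(sample)
urls = {}
pathes = []
for node in root.findall('path'):
    pathes.append(node.get('project') + "/" + node.get('repository'))
    if node.get('url'):
        urls[node.get('project') + "/" + node.get('repository')] = node.get('url') + '/%(arch)s/%(filename)s'

print(urls)
# {'openSUSE:Factory/standard':
#  'http://mirror.example.org/repositories/openSUSE:/Factory/standard/%(arch)s/%(filename)s'}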
@@ -1024,7 +1030,9 @@ def main(apiurl, opts, argv):
     else:
         urllist = config['urllist']

-    # OBS 1.5 and before has no downloadurl defined in buildinfo
+    # OBS 1.5 and before has no downloadurl defined in buildinfo, but it is obsolete again meanwhile.
+    # we have now specific download repositories per repository. Could be removed IMHO, since the api fallback
+    # is there. In worst case it could fetch the wrong rpm...
     if bi.downloadurl:
         urllist.append(bi.downloadurl + '/%(extproject)s/%(extrepository)s/%(arch)s/%(filename)s')
     if opts.disable_cpio_bulk_download:
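The entries appended to urllist here are old-style %-format templates; the placeholders are filled in later, per package, when the actual download URLs are built. A rough sketch of that substitution, using the placeholder names from the template above but invented field values (the real code may pass more fields than shown):

# Hedged sketch of expanding one of the %-style URL templates above.
# All values below are invented examples, not real osc data.
template = 'https://download.example.org/%(extproject)s/%(extrepository)s/%(arch)s/%(filename)s'
fields = {
    'extproject': 'openSUSE:/Factory',   # assumed: ':' in the project becomes ':/' in download paths
    'extrepository': 'standard',
    'arch': 'x86_64',
    'filename': 'zlib-1.2.11-3.1.x86_64.rpm',
}
print(template % fields)
# https://download.example.org/openSUSE:/Factory/standard/x86_64/zlib-1.2.11-3.1.x86_64.rpm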


@@ -36,6 +36,7 @@ class Fetcher:
             self.progress_obj = create_text_meter(use_pb_fallback=False)

         self.cachedir = cachedir
+        # generic download URL lists
         self.urllist = urllist
         self.modules = modules
         self.http_debug = http_debug
@@ -193,17 +194,26 @@ class Fetcher:
             print(e, file=sys.stderr)
             sys.exit(1)

+    def _build_urllist(self, buildinfo, pac):
+        urllist = self.urllist
+        key = '%s/%s' % (pac.project, pac.repository)
+        project_repo_url = buildinfo.urls.get(key)
+        if project_repo_url is not None:
+            urllist = [project_repo_url]
+        return urllist
+
     def run(self, buildinfo):
         cached = 0
         all = len(buildinfo.deps)
         for i in buildinfo.deps:
-            i.makeurls(self.cachedir, self.urllist)
+            urllist = self._build_urllist(buildinfo, i)
+            i.makeurls(self.cachedir, urllist)
             # find container extension by looking in the cache
             if i.name.startswith('container:') and i.fullfilename.endswith('.tar.xz'):
                 for ext in ['.tar.xz', '.tar.gz', '.tar']:
                     if os.path.exists(i.fullfilename[:-7] + ext):
                         i.canonname = i.canonname[:-7] + ext
-                        i.makeurls(self.cachedir, self.urllist)
+                        i.makeurls(self.cachedir, urllist)
             if os.path.exists(i.fullfilename):
                 cached += 1
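A short standalone sketch of the selection behaviour the new _build_urllist introduces: a dependency whose project/repository pair has a mirror URL in buildinfo.urls is fetched only from that URL, everything else keeps the generic urllist. The stub classes and URLs below are invented for the example; only the attribute names mirror the code above.

# Stand-ins for Buildinfo and the per-dependency package object.
class StubBuildinfo:
    urls = {'openSUSE:Factory/standard':
            'http://mirror.example.org/openSUSE:/Factory/standard/%(arch)s/%(filename)s'}

class StubPac:
    def __init__(self, project, repository):
        self.project = project
        self.repository = repository

generic_urllist = ['https://api.example.org/build/%(project)s/%(repository)s/%(arch)s/%(filename)s']

def build_urllist(buildinfo, pac, default_urllist):
    # Same idea as Fetcher._build_urllist: prefer the repo-specific mirror,
    # otherwise fall back to the generic URL list.
    key = '%s/%s' % (pac.project, pac.repository)
    project_repo_url = buildinfo.urls.get(key)
    if project_repo_url is not None:
        return [project_repo_url]
    return default_urllist

print(build_urllist(StubBuildinfo(), StubPac('openSUSE:Factory', 'standard'), generic_urllist))
print(build_urllist(StubBuildinfo(), StubPac('home:someuser', 'standard'), generic_urllist))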
@@ -216,6 +226,7 @@ class Fetcher:
                     if not hdrmd5 or hdrmd5 != i.hdrmd5:
                         os.unlink(i.fullfilename)
                         cached -= 1
+
         miss = 0
         needed = all - cached
         if all:
@@ -223,7 +234,6 @@ class Fetcher:
         print("%.1f%% cache miss. %d/%d dependencies cached.\n" % (miss, cached, all))
         done = 1
         for i in buildinfo.deps:
-            i.makeurls(self.cachedir, self.urllist)
             if not os.path.exists(i.fullfilename):
                 if self.offline:
                     raise oscerr.OscIOError(None,