Fix handle_update_repos for Factory and leap

commit 84445efb65
parent 48179b3617
@@ -13,7 +13,6 @@ import logging
 from osc import conf
 from osclib.conf import Config
 from osclib.stagingapi import StagingAPI
-from pkglistgen import solv_utils
 from pkglistgen.tool import PkgListGen
 from pkglistgen.update_repo_handler import update_project
 
@@ -33,21 +32,6 @@ class CommandLineInterface(ToolBase.CommandLineInterface):
 
         return tool
 
-    def do_create_sle_weakremovers(self, subcmd, opts, target, *prjs):
-        """${cmd_name}: generate list of obsolete packages for SLE
-
-        The globally specified repositories are taken as the current
-        package set. All solv files specified on the command line
-        are old versions of those repos.
-
-        The command outputs the weakremovers.inc to be used in
-        000package-groups
-
-        ${cmd_usage}
-        ${cmd_option_list}
-        """
-        return self.tool.create_sle_weakremovers(target, prjs)
-
     def do_handle_update_repos(self, subcmd, opts, project):
         """${cmd_name}: Update 00update-repos
 
@@ -56,7 +40,7 @@ class CommandLineInterface(ToolBase.CommandLineInterface):
         ${cmd_usage}
         ${cmd_option_list}
         """
-        return update_project(conf.config['apiurl'], project)
+        update_project(conf.config['apiurl'], project)
 
     @cmdln.option('-f', '--force', action='store_true', help='continue even if build is in progress')
     @cmdln.option('-p', '--project', help='target project')
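
Note: update_project() now performs the whole fetch/delta/upload cycle itself
and returns nothing, so the handler simply invokes it instead of returning its
result. A minimal sketch of driving the same code path outside the CLI (the
target project name is only an example):

    from osc import conf
    from pkglistgen.update_repo_handler import update_project

    conf.get_config()  # populate conf.config['apiurl'] from ~/.oscrc
    update_project(conf.config['apiurl'], 'openSUSE:Factory')
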
@@ -1,154 +0,0 @@
-from __future__ import print_function
-
-import filecmp
-import glob
-import gzip
-import hashlib
-import io
-import logging
-import os.path
-import random
-import string
-import subprocess
-import sys
-import shutil
-import tempfile
-
-from lxml import etree as ET
-
-from osc import conf
-from osclib.util import project_list_family
-from osclib.util import project_list_family_prior
-from osclib.conf import Config
-from osclib.cache_manager import CacheManager
-
-import requests
-
-import solv
-
-# share header cache with repochecker
-CACHEDIR = CacheManager.directory('repository-meta')
-
-try:
-    from urllib.parse import urljoin
-except ImportError:
-    # python 2.x
-    from urlparse import urljoin
-
-logger = logging.getLogger()
-
-def dump_solv_build(baseurl):
-    """Determine repo format and build string from remote repository."""
-
-    if not baseurl.endswith('/'):
-        baseurl += '/'
-
-    if 'update' in baseurl:
-        # Could look at .repo file or repomd.xml, but larger change.
-        return 'update-' + os.path.basename(os.path.normpath(baseurl)), 'update'
-
-    url = urljoin(baseurl, 'media.1/media')
-    with requests.get(url) as media:
-        for i, line in enumerate(media.iter_lines()):
-            if i != 1:
-                continue
-            name = line
-
-    if name is not None and '-Build' in name:
-        return name, 'media'
-
-    url = urljoin(baseurl, 'media.1/build')
-    with requests.get(url) as build:
-        name = build.content.strip()
-
-    if name is not None and '-Build' in name:
-        return name, 'build'
-
-    raise Exception(baseurl + 'media.1/{media,build} includes no build number')
-
-def dump_solv(baseurl, output_dir, overwrite):
-    name = None
-    ofh = sys.stdout
-    if output_dir:
-        build, repo_style = dump_solv_build(baseurl)
-        name = os.path.join(output_dir, '{}.solv'.format(build))
-        # For update repo name never changes so always update.
-        if not overwrite and repo_style != 'update' and os.path.exists(name):
-            logger.info('%s exists', name)
-            return name
-
-    pool = solv.Pool()
-    pool.setarch()
-
-    repo = pool.add_repo(''.join(random.choice(string.letters) for _ in range(5)))
-    path_prefix = 'suse/' if name and repo_style == 'build' else ''
-    url = urljoin(baseurl, path_prefix + 'repodata/repomd.xml')
-    repomd = requests.get(url)
-    ns = {'r': 'http://linux.duke.edu/metadata/repo'}
-    root = ET.fromstring(repomd.content)
-    primary_element = root.find('.//r:data[@type="primary"]', ns)
-    location = primary_element.find('r:location', ns).get('href')
-    sha256_expected = primary_element.find('r:checksum[@type="sha256"]', ns).text
-
-    # No build information in update repo to use repomd checksum in name.
-    if repo_style == 'update':
-        name = os.path.join(output_dir, '{}::{}.solv'.format(build, sha256_expected))
-        if not overwrite and os.path.exists(name):
-            logger.info('%s exists', name)
-            return name
-
-        # Only consider latest update repo so remove old versions.
-        # Pre-release builds only make sense for non-update repos and once
-        # releases then only relevant for next product which does not
-        # consider pre-release from previous version.
-        for old_solv in glob.glob(os.path.join(output_dir, '{}::*.solv'.format(build))):
-            os.remove(old_solv)
-
-    f = tempfile.TemporaryFile()
-    f.write(repomd.content)
-    f.flush()
-    os.lseek(f.fileno(), 0, os.SEEK_SET)
-    repo.add_repomdxml(f, 0)
-    url = urljoin(baseurl, path_prefix + location)
-    with requests.get(url, stream=True) as primary:
-        sha256 = hashlib.sha256(primary.content).hexdigest()
-        if sha256 != sha256_expected:
-            raise Exception('checksums do not match {} != {}'.format(sha256, sha256_expected))
-
-        content = gzip.GzipFile(fileobj=io.BytesIO(primary.content))
-        os.lseek(f.fileno(), 0, os.SEEK_SET)
-        f.write(content.read())
-        f.flush()
-        os.lseek(f.fileno(), 0, os.SEEK_SET)
-        repo.add_rpmmd(f, None, 0)
-        repo.create_stubs()
-
-        ofh = open(name + '.new', 'w')
-        repo.write(ofh)
-
-    if name is not None:
-        # Only update file if overwrite or different.
-        ofh.flush()  # Ensure entirely written before comparing.
-        if not overwrite and os.path.exists(name) and filecmp.cmp(name + '.new', name, shallow=False):
-            logger.debug('file identical, skip dumping')
-            os.remove(name + '.new')
-        else:
-            os.rename(name + '.new', name)
-        return name
-
-def solv_merge(solv_merged, *solvs):
-    solvs = list(solvs)  # From tuple.
-
-    if os.path.exists(solv_merged):
-        modified = map(os.path.getmtime, [solv_merged] + solvs)
-        if max(modified) <= modified[0]:
-            # The two inputs were modified before or at the same as merged.
-            logger.debug('merge skipped for {}'.format(solv_merged))
-            return
-
-    with open(solv_merged, 'w') as handle:
-        p = subprocess.Popen(['mergesolv'] + solvs, stdout=handle)
-        p.communicate()
-
-    if p.returncode:
-        raise Exception('failed to create merged solv file')
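
Note: the deleted helper above verified repodata against the sha256 recorded
in repomd.xml before handing it to libsolv. A condensed, self-contained sketch
of that verification pattern (fetch_primary_verified is an invented name;
baseurl is assumed to end with '/'):

    import hashlib
    import requests
    from lxml import etree as ET

    def fetch_primary_verified(baseurl):
        # repomd.xml lists each metadata file together with its checksum.
        ns = {'r': 'http://linux.duke.edu/metadata/repo'}
        root = ET.fromstring(requests.get(baseurl + 'repodata/repomd.xml').content)
        primary = root.find('.//r:data[@type="primary"]', ns)
        location = primary.find('r:location', ns).get('href')
        expected = primary.find('r:checksum[@type="sha256"]', ns).text
        blob = requests.get(baseurl + location).content
        if hashlib.sha256(blob).hexdigest() != expected:
            raise Exception('checksum mismatch for ' + location)
        return blob  # still the gzip-compressed primary.xml.gz
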
@@ -10,6 +10,7 @@ import shutil
 import subprocess
 import yaml
 import sys
+import tempfile
 
 from lxml import etree as ET
 
@@ -33,13 +34,16 @@ except ImportError:
     # python 2.x
     from urlparse import urlparse
 
-from pkglistgen import file_utils, solv_utils
+from pkglistgen import file_utils
 from pkglistgen.group import Group
 
 SCRIPT_PATH = os.path.dirname(os.path.realpath(__file__))
 
 PRODUCT_SERVICE = '/usr/lib/obs/service/create_single_product'
 
+# share header cache with repochecker
+CACHEDIR = CacheManager.directory('repository-meta')
+
 class PkgListGen(ToolBase.ToolBase):
 
     def __init__(self):
@@ -198,7 +202,7 @@ class PkgListGen(ToolBase.ToolBase):
 
         for project, reponame in self.repos:
             repo = pool.add_repo(project)
-            s = os.path.join(solv_utils.CACHEDIR, 'repo-{}-{}-{}.solv'.format(project, reponame, arch))
+            s = os.path.join(CACHEDIR, 'repo-{}-{}-{}.solv'.format(project, reponame, arch))
             r = repo.add_solv(s)
             if not r:
                 if not self.did_update:
@@ -323,7 +327,7 @@ class PkgListGen(ToolBase.ToolBase):
         for project, repo in self.repos:
             for arch in architectures:
                 # TODO: refactor to common function with repo_checker.py
-                d = os.path.join(solv_utils.CACHEDIR, project, repo, arch)
+                d = os.path.join(CACHEDIR, project, repo, arch)
                 if not os.path.exists(d):
                     os.makedirs(d)
 
@@ -335,7 +339,7 @@ class PkgListGen(ToolBase.ToolBase):
 
                 # Would be preferable to include hash in name, but cumbersome to handle without
                 # reworking a fair bit since the state needs to be tracked.
-                solv_file = os.path.join(solv_utils.CACHEDIR, 'repo-{}-{}-{}.solv'.format(project, repo, arch))
+                solv_file = os.path.join(CACHEDIR, 'repo-{}-{}-{}.solv'.format(project, repo, arch))
                 solv_file_hash = '{}::{}'.format(solv_file, state)
                 if os.path.exists(solv_file) and os.path.exists(solv_file_hash):
                     # Solve file exists and hash unchanged, skip updating solv.
@@ -371,14 +375,13 @@ class PkgListGen(ToolBase.ToolBase):
 
         return global_update
 
-    def create_sle_weakremovers(self, target, oldprjs):
-        self.repos = []
-        for prj in list(oldprjs) + [target]:
-            self.repos += self.expand_repos(prj, 'standard')
-
+    def create_weakremovers(self, target, target_config, directory):
+        self.repos = self.expand_repos(target, 'standard')
+        self.all_architectures = target_config.get('pkglistgen-archs').split(' ')
         self.update_repos(self.all_architectures)
 
         drops = dict()
+        dropped_repos = dict()
         for arch in self.all_architectures:
             pool = solv.Pool()
             pool.setarch(arch)
@@ -386,7 +389,7 @@ class PkgListGen(ToolBase.ToolBase):
             sysrepo = None
             for project, repo in self.repos:
                 self.logger.debug('processing %s/%s/%s', project, repo, arch)
-                fn = os.path.join(solv_utils.CACHEDIR, 'repo-{}-{}-{}.solv'.format(project, repo, arch))
+                fn = os.path.join(CACHEDIR, 'repo-{}-{}-{}.solv'.format(project, repo, arch))
                 r = pool.add_repo('/'.join([project, repo]))
                 r.add_solv(fn)
                 if project == target and repo == 'standard':
@@ -394,6 +397,17 @@ class PkgListGen(ToolBase.ToolBase):
 
             pool.createwhatprovides()
 
+            for oldrepo in glob.glob('/space/opensuse/home:coolo/00update-repos/*.packages.xz'):
+                repo = pool.add_repo(oldrepo)
+                defvendorid = repo.meta.lookup_id(solv.SUSETAGS_DEFAULTVENDOR)
+                f = tempfile.TemporaryFile()
+                # FIXME: port to lzma module with python3
+                st = subprocess.call(['xz', '-cd', oldrepo], stdout=f.fileno())
+                os.lseek(f.fileno(), 0, os.SEEK_SET)
+                repo.add_susetags(solv.xfopen_fd(None, f.fileno()), defvendorid, None, solv.Repo.REPO_NO_INTERNALIZE|solv.Repo.SUSETAGS_RECORD_SHARES)
+
+            pool.createwhatprovides()
+
             for s in pool.solvables_iter():
                 # we only want the old repos
                 if s.repo == sysrepo: continue
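
Note: the loop added above decompresses each susetags dump with xz and hands
the result to libsolv through a raw file descriptor. The same pattern in
isolation, as a hypothetical helper:

    import os
    import subprocess
    import tempfile

    import solv

    def add_xz_susetags(pool, path):
        # Decompress into an anonymous temp file, rewind, and let libsolv
        # read it back via the file descriptor.
        repo = pool.add_repo(os.path.basename(path))
        f = tempfile.TemporaryFile()
        subprocess.call(['xz', '-cd', path], stdout=f.fileno())
        os.lseek(f.fileno(), 0, os.SEEK_SET)
        defvendorid = repo.meta.lookup_id(solv.SUSETAGS_DEFAULTVENDOR)
        repo.add_susetags(solv.xfopen_fd(None, f.fileno()), defvendorid, None,
                          solv.Repo.REPO_NO_INTERNALIZE | solv.Repo.SUSETAGS_RECORD_SHARES)
        return repo
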
@@ -404,96 +418,50 @@ class PkgListGen(ToolBase.ToolBase):
                 for s2 in pool.whatprovides(s.nameid):
                     if s2.repo == sysrepo and s.nameid == s2.nameid:
                         haveit = True
+                        break
                 if haveit:
                     continue
-                haveit = False
+                obsolete = False
 
                 # check for already obsoleted packages
                 nevr = pool.rel2id(s.nameid, s.evrid, solv.REL_EQ)
                 for s2 in pool.whatmatchesdep(solv.SOLVABLE_OBSOLETES, nevr):
-                    if s2.repo == sysrepo: continue
-                    haveit = True
-                if haveit:
+                    if s2.repo == sysrepo:
+                        obsolete = True
+                        continue
+                if obsolete:
                     continue
                 drops.setdefault(s.name, {'repo': s.repo.name, 'archs': set()})
                 drops[s.name]['archs'].add(arch)
+                dropped_repos[s.repo.name] = 1
 
-        for project, repo in sorted(self.repos):
+            del pool
+
+        for repo in sorted(dropped_repos.keys()):
+            repo_output = False
             exclusives = dict()
-            print('#', project)
             for name in sorted(drops.keys()):
                 #
-                if drops[name]['repo'] != '{}/{}'.format(project, repo):
-                    #print(drops[name]['repo'], '!=', '{}/{}'.format(project, repo))
+                if drops[name]['repo'] != repo:
                     continue
                 if len(drops[name]['archs']) == len(self.all_architectures):
+                    if not repo_output:
+                        print('#', repo)
+                        repo_output = True
                     print('Provides: weakremover({})'.format(name))
                 else:
                     jarch = ' '.join(sorted(drops[name]['archs']))
                     exclusives.setdefault(jarch, []).append(name)
 
             for arch in sorted(exclusives.keys()):
+                if not repo_output:
+                    print('#', repo)
+                    repo_output = True
                 print('%ifarch {}'.format(arch))
                 for name in sorted(exclusives[arch]):
                     print('Provides: weakremover({})'.format(name))
                 print('%endif')
 
-    # TODO: no longer used, needs to be migrated
-    def create_droplist(self, output_dir, oldsolv):
-        drops = dict()
-
-        for arch in self.filtered_architectures:
-
-            for old in oldsolv:
-
-                self.logger.debug('%s: processing %s', arch, old)
-
-                pool = solv.Pool()
-                pool.setarch(arch)
-
-                for project, repo in self.repos:
-                    fn = os.path.join(solv_utils.CACHEDIR, 'repo-{}-{}-{}.solv'.format(project, repo, arch))
-                    r = pool.add_repo(project)
-                    r.add_solv(fn)
-
-                sysrepo = pool.add_repo(os.path.basename(old).replace('.merged.solv', ''))
-                sysrepo.add_solv(old)
-
-                pool.createwhatprovides()
-
-                for s in sysrepo.solvables:
-                    haveit = False
-                    for s2 in pool.whatprovides(s.nameid):
-                        if s2.repo == sysrepo or s.nameid != s2.nameid:
-                            continue
-                        haveit = True
-                    if haveit:
-                        continue
-                    nevr = pool.rel2id(s.nameid, s.evrid, solv.REL_EQ)
-                    for s2 in pool.whatmatchesdep(solv.SOLVABLE_OBSOLETES, nevr):
-                        if s2.repo == sysrepo:
-                            continue
-                        haveit = True
-                    if haveit:
-                        continue
-                    if s.name not in drops:
-                        drops[s.name] = sysrepo.name
-
-                # mark it explicitly to avoid having 2 pools while GC is not run
-                del pool
-
-        ofh = sys.stdout
-        if output_dir:
-            name = os.path.join(output_dir, 'obsoletepackages.inc')
-            ofh = open(name, 'w')
-
-        for reponame in sorted(set(drops.values())):
-            print('<!-- %s -->' % reponame, file=ofh)
-            for p in sorted(drops):
-                if drops[p] != reponame:
-                    continue
-                print(' <obsoletepackage>%s</obsoletepackage>' % p, file=ofh)
-
     def solve_project(self, ignore_unresolvable=False, ignore_recommended=False, locale=None, locales_from=None):
         self.load_all_groups()
         if not self.output:
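
Note: with the new repo_output flag, create_weakremovers() prints a comment
header only for repos that actually contribute drops. The generated
weakremovers.inc then looks roughly like this (repo and package names are
invented):

    # openSUSE:Leap:15.0/standard
    Provides: weakremover(somepkg)
    Provides: weakremover(otherpkg)
    %ifarch x86_64
    Provides: weakremover(x86only-pkg)
    %endif
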
@@ -1,6 +1,5 @@
 from __future__ import print_function
 
-import filecmp
 import glob
 import gzip
 import hashlib
@@ -19,9 +18,6 @@ from lxml import etree as ET
 
 from osc import conf
 import osc.core
-from osclib.util import project_list_family
-from osclib.util import project_list_family_prior
-from osclib.conf import Config
 from osclib.cache_manager import CacheManager
 
 import requests
@@ -31,23 +27,27 @@ import solv
 import yaml
 
 try:
-    from urllib.parse import urljoin
+    from urllib.parse import urljoin, urlparse
 except ImportError:
     # python 2.x
-    from urlparse import urljoin
+    from urlparse import urljoin, urlparse
 
 logger = logging.getLogger()
 
 def dump_solv_build(baseurl):
     """Determine repo format and build string from remote repository."""
 
-    buildre = re.compile('.*-Build(.*)')
+    buildre = re.compile(r'.*-Build(.*)')
+    factoryre = re.compile(r'openSUSE-(\d*)-i586-x86_64-Build.*')
     url = urljoin(baseurl, 'media.1/media')
     with requests.get(url) as media:
         if media.status_code == requests.codes.ok:
             for i, line in enumerate(media.iter_lines()):
                 if i != 1:
                     continue
+                build = factoryre.match(line)
+                if build:
+                    return build.group(1)
                 build = buildre.match(line)
                 if build:
                     return build.group(1)
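
Note: Factory media identifiers carry the snapshot between 'openSUSE-' and
'-i586-x86_64-Build', so the generic buildre would only capture the trailing
build counter; factoryre is therefore tried first. A quick illustration (both
media lines are hypothetical):

    import re

    buildre = re.compile(r'.*-Build(.*)')
    factoryre = re.compile(r'openSUSE-(\d*)-i586-x86_64-Build.*')

    line = 'openSUSE-20190218-i586-x86_64-Build1.1'
    print(factoryre.match(line).group(1))  # '20190218' (the snapshot)
    print(buildre.match(line).group(1))    # '1.1' (not unique across snapshots)

    leap = 'openSUSE-Leap-15.1-DVD-x86_64-Build49.1'
    print(factoryre.match(leap))           # None, falls through to buildre
    print(buildre.match(leap).group(1))    # '49.1'
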
@@ -120,7 +120,7 @@ def parse_susetags(repo, baseurl):
     defvendorid = repo.meta.lookup_id(solv.SUSETAGS_DEFAULTVENDOR)
     descrdir = repo.meta.lookup_str(solv.SUSETAGS_DESCRDIR)
     if not descrdir:
-        descrdir = "suse/setup/descr"
+        descrdir = 'suse/setup/descr'
 
     url = urljoin(baseurl, descrdir + '/packages.gz')
     with requests.get(url, stream=True) as packages:
@@ -152,37 +152,21 @@ def dump_solv(name, baseurl):
 
     return name
 
-def fetch_item(key, opts):
-    baseurl = opts['url']
-    if not baseurl.endswith('/'):
-        baseurl += '/'
-
-    output_dir = '/space/opensuse/home:coolo/00update-repos'
-    if opts.get('refresh', False):
-        build = dump_solv_build(baseurl)
-        name = os.path.join(output_dir, key + '_{}.solv'.format(build))
-    else:
-        name = os.path.join(output_dir, key + '.solv')
-
-    if os.path.exists(name):
-        return name
-
-    return dump_solv(name, baseurl)
-
-def print_repo_delta(repo1, repo2, packages_file):
+def print_repo_delta(pool, repo2, packages_file):
     print('=Ver: 2.0', file=packages_file)
     present = dict()
-    for s in repo1.solvables:
-        present["{}/{}".format(s.name, s.arch)] = s.evr
+    for s in pool.solvables_iter():
+        if s.repo != repo2:
+            key = '{}/{}'.format(s.name, s.arch)
+            present.setdefault(key, {})
+            present[key][s.evr] = s.repo
     for s in repo2.solvables:
-        key = "{}/{}".format(s.name, s.arch)
-        if key in present:
-            if present[key] != s.evr:
-                print('# UPDATE', s.name, s.arch, present[key], '->', s.evr, file=packages_file)
-            else:
-                continue
-        else:
-            print('# NEW', s.name,s.arch, file=packages_file)
+        if s.arch == 'src': continue
+        key = '{}/{}'.format(s.name, s.arch)
+        if present.get(key, {}).get(s.evr):
+            continue
+        elif not key in present:
+            print('# NEW', s.name, s.arch, file=packages_file)
         evr = s.evr.split('-')
         release = evr.pop()
         print('=Pkg:', s.name, '-'.join(evr), release, s.arch, file=packages_file)
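
Note: print_repo_delta() now emits only the packages missing from the older
repos in the pool, in susetags style. For a hypothetical new package the
.packages fragment would look like (the +Prv:/-Prv: body is abbreviated; the
'-Prv:' terminator is the context line visible in the next hunk):

    =Ver: 2.0
    # NEW somepkg x86_64
    =Pkg: somepkg 1.0 lp151.1.1 x86_64
    +Prv:
    somepkg = 1.0-lp151.1.1
    -Prv:
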
@@ -192,37 +176,64 @@ def print_repo_delta(repo1, repo2, packages_file):
     print('-Prv:', file=packages_file)
 
 def update_project(apiurl, project):
-    url = osc.core.makeurl(apiurl, ['source', project, '00update-repos', 'config.yml'])
-    root = yaml.safe_load(osc.core.http_GET(url))
+    # Cache dir specific to hostname and project.
+    host = urlparse(apiurl).hostname
+    cache_dir = CacheManager.directory('update_repo_handler', host, project)
+    repo_dir = os.path.join(cache_dir, '000update-repos')
+
+    # development aid
+    checkout = True
+    if checkout:
+        if os.path.exists(cache_dir):
+            shutil.rmtree(cache_dir)
+        os.makedirs(cache_dir)
+
+        osc.core.checkout_package(apiurl, project, '000update-repos', expand_link=True, prj_dir=cache_dir)
+
+    root = yaml.safe_load(open(os.path.join(repo_dir, 'config.yml')))
     for item in root:
         key = item.keys()[0]
+        opts = item[key]
         # cast 15.1 to string :)
-        #fetch_item(str(key), item[key])
+        key = str(key)
+        if not opts['url'].endswith('/'):
+            opts['url'] += '/'
+
+        if opts.get('refresh', False):
+            opts['build'] = dump_solv_build(opts['url'])
+            path = '{}_{}.packages'.format(key, opts['build'])
+        else:
+            path = key + '.packages'
+        packages_file = os.path.join(repo_dir, path)
+
+        if os.path.exists(packages_file + '.xz'):
+            print(path, 'already exists')
+            continue
+
+        solv_file = packages_file + '.solv'
+        dump_solv(solv_file, opts['url'])
 
-    pool = solv.Pool()
-    pool.setarch()
+        pool = solv.Pool()
+        pool.setarch()
 
-    repo0 = pool.add_repo(''.join(random.choice(string.letters) for _ in range(5)))
-    prevfile = None
-    for file in glob.glob('/space/opensuse/home:coolo/00update-repos/15.1_*.solv'):
-        if prevfile:
-            repo1 = pool.add_repo(''.join(random.choice(string.letters) for _ in range(5)))
-            repo1.add_solv(prevfile)
-
-            repo2 = pool.add_repo(''.join(random.choice(string.letters) for _ in range(5)))
-            repo2.add_solv(file)
-            p = file.replace('.solv', '.packages')
-            print_repo_delta(repo1, repo2, open(p, 'w'))
-        prevfile = file
-
-    #repo2 = pool.add_repo(''.join(random.choice(string.letters) for _ in range(5)))
-    #repo2.add_solv('/space/opensuse/home:coolo/00update-repos/15.1_297.3.solv')
-
-    #print_repo_delta(repo1, repo2, open('/space/opensuse/home:coolo/00update-repos/15.1_297.3.packages', 'w'))
-
-def import_one(pool):
-    repo = pool.add_repo(''.join(random.choice(string.letters) for _ in range(5)))
-    defvendorid = repo.meta.lookup_id(solv.SUSETAGS_DEFAULTVENDOR)
-    repo.add_susetags(f, defvendorid, None, solv.Repo.REPO_NO_INTERNALIZE|solv.Repo.SUSETAGS_RECORD_SHARES)
+        if opts.get('refresh', False):
+            for file in glob.glob(os.path.join(repo_dir, '{}_*.packages.xz'.format(key))):
+                repo = pool.add_repo(file)
+                defvendorid = repo.meta.lookup_id(solv.SUSETAGS_DEFAULTVENDOR)
+                f = tempfile.TemporaryFile()
+                # FIXME: port to lzma module with python3
+                st = subprocess.call(['xz', '-cd', file], stdout=f.fileno())
+                os.lseek(f.fileno(), 0, os.SEEK_SET)
+                repo.add_susetags(solv.xfopen_fd(None, f.fileno()), defvendorid, None, solv.Repo.REPO_NO_INTERNALIZE|solv.Repo.SUSETAGS_RECORD_SHARES)
+
+        repo1 = pool.add_repo(''.join(random.choice(string.letters) for _ in range(5)))
+        repo1.add_solv(solv_file)
+
+        print_repo_delta(pool, repo1, open(packages_file, 'w'))
+        subprocess.call(['xz', '-9', packages_file])
+        os.unlink(solv_file)
+
+        url = osc.core.makeurl(apiurl, ['source', project, '000update-repos', path + '.xz'])
+        osc.core.http_PUT(url, data=open(packages_file + '.xz').read())
+
+        del pool
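
Note: update_project() expects 000update-repos/config.yml to be a list of
single-key mappings. A rough Python equivalent of the parsed result (URL and
flags invented), showing why the key must be cast to str:

    # yaml.safe_load() on a document such as
    #   - 15.1:
    #       url: http://example.org/distribution/leap/15.1/repo/oss
    #       refresh: true
    # yields a float key, because YAML reads the bare 15.1 as a number:
    root = [
        {15.1: {'url': 'http://example.org/distribution/leap/15.1/repo/oss',
                'refresh': True}},
    ]
    key = list(root[0].keys())[0]  # 15.1 (float)
    key = str(key)                 # '15.1', usable in names like '15.1_<build>.packages'
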