Merge pull request #2660 from coolo/pkglistgen_no_cache
pkglistgen: Skip relying on the global cache directory
commit 417c4e6d50
@@ -57,7 +57,6 @@ class PkgListGen(ToolBase.ToolBase):
         self.unwanted = set()
         self.output = None
         self.locales = set()
-        self.did_update = False
         self.filtered_architectures = None
         self.dry_run = False
         self.all_architectures = None
@@ -218,13 +217,9 @@ class PkgListGen(ToolBase.ToolBase):
 
         for project, reponame in self.repos:
             repo = pool.add_repo(project)
-            s = os.path.join(CACHEDIR, 'repo-{}-{}-{}.solv'.format(project, reponame, arch))
-            r = repo.add_solv(s)
-            if not r:
-                if not self.did_update:
-                    raise Exception(
-                        'failed to add repo {}/{}/{}. Need to run update first?'.format(project, reponame, arch))
-                continue
+            s = 'repo-{}-{}-{}.solv'.format(project, reponame, arch)
+            if not repo.add_solv(s):
+                raise Exception('failed to add repo {}/{}/{}.'.format(project, reponame, arch))
             for solvable in repo.solvables_iter():
                 if ignore_conflicts:
                     solvable.unset(solv.SOLVABLE_CONFLICTS)
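
A minimal sketch of the new loading pattern, assuming the working directory already contains the generated repo-*.solv files; the helper name load_repos and the example project/repository/architecture values are illustrative, not part of this change:

import solv

def load_repos(pool, repos, arch):
    # repos is a list of (project, reponame) tuples, as in self.repos above
    for project, reponame in repos:
        repo = pool.add_repo(project)
        s = 'repo-{}-{}-{}.solv'.format(project, reponame, arch)
        # libsolv's add_solv() evaluates false when the file cannot be read,
        # so a missing solv file is now a hard error instead of a soft skip
        if not repo.add_solv(s):
            raise Exception('failed to add repo {}/{}/{}.'.format(project, reponame, arch))

# Example use (illustrative values):
# pool = solv.Pool()
# load_repos(pool, [('openSUSE:Factory', 'standard')], 'x86_64')
# pool.createwhatprovides()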
@@ -339,15 +334,9 @@ class PkgListGen(ToolBase.ToolBase):
     def update_repos(self, architectures):
         # only there to parse the repos
         bs_mirrorfull = os.path.join(SCRIPT_PATH, '..', 'bs_mirrorfull')
-        global_update = False
 
         for project, repo in self.repos:
             for arch in architectures:
-                # TODO: refactor to common function with repo_checker.py
-                d = os.path.join(CACHEDIR, project, repo, arch)
-                if not os.path.exists(d):
-                    os.makedirs(d)
-
                 # Fetch state before mirroring in-case it changes during download.
                 state = repository_arch_state(self.apiurl, project, repo, arch)
 
@@ -355,19 +344,11 @@ class PkgListGen(ToolBase.ToolBase):
                     # Repo might not have this architecture
                     continue
 
-                # Would be preferable to include hash in name, but cumbersome to handle without
-                # reworking a fair bit since the state needs to be tracked.
-                solv_file = os.path.join(CACHEDIR, 'repo-{}-{}-{}.solv'.format(project, repo, arch))
-                solv_file_hash = '{}::{}'.format(solv_file, state)
-                if os.path.exists(solv_file) and os.path.exists(solv_file_hash):
-                    # Solve file exists and hash unchanged, skip updating solv.
-                    self.logger.debug('skipping solv generation for {} due to matching state {}'.format(
-                        '/'.join([project, repo, arch]), state))
-                    continue
-
-                # Either hash changed or new, so remove any old hash files.
-                file_utils.unlink_list(None, glob.glob(solv_file + '::*'))
-                global_update = True
+                d = os.path.join(CACHEDIR, project, repo, arch)
+                if not os.path.exists(d):
+                    os.makedirs(d)
+
+                solv_file = 'repo-{}-{}-{}.solv'.format(project, repo, arch)
 
                 self.logger.debug('updating %s', d)
                 args = [bs_mirrorfull]
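
For reference, the dropped fast path hinged on an empty '<solv_file>::<state>' marker recording which repository state a solv file was generated from. Roughly, the old convention behaved like this (standalone sketch of the removed behaviour; the helper names are made up):

import glob
import os

def solv_is_current(solv_file, state):
    # old skip condition: solv file present plus a marker for the same state
    marker = '{}::{}'.format(solv_file, state)
    return os.path.exists(solv_file) and os.path.exists(marker)

def record_state(solv_file, state):
    # drop markers left over from other states, then touch the current one
    for stale in glob.glob(solv_file + '::*'):
        os.unlink(stale)
    open('{}::{}'.format(solv_file, state), 'a').close()

With that gone, every run mirrors and regenerates the solv file unconditionally; only the per-repository download directory under CACHEDIR remains.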
@@ -390,12 +371,6 @@ class PkgListGen(ToolBase.ToolBase):
                 if p.wait() != 0:
                     raise Exception("rpm2solv failed")
 
-                # Create hash file now that solv creation is complete.
-                open(solv_file_hash, 'a').close()
-                self.did_update = True
-
-        return global_update
-
     def create_weakremovers(self, target, target_config, directory, output):
         drops = dict()
         dropped_repos = dict()
@@ -424,10 +399,11 @@ class PkgListGen(ToolBase.ToolBase):
             oldsysrepo.add_susetags(solv.xfopen_fd(None, f.fileno()), defvendorid, None, solv.Repo.REPO_NO_INTERNALIZE | solv.Repo.SUSETAGS_RECORD_SHARES)
 
         for arch in self.all_architectures:
-            for project, repo in self.repos:
-                fn = os.path.join(CACHEDIR, 'repo-{}-{}-{}.solv'.format(project, repo, arch))
-                r = pool.add_repo('/'.join([project, repo]))
-                r.add_solv(fn)
+            for project, reponame in self.repos:
+                fn = 'repo-{}-{}-{}.solv'.format(project, reponame, arch)
+                repo = pool.add_repo('/'.join([project, reponame]))
+                if not repo.add_solv(fn):
+                    raise Exception('failed to add repo {}/{}/{}.'.format(project, reponame, arch))
 
         pool.createwhatprovides()
 
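
The createwhatprovides() call kept in the context above is what makes the loaded repositories queryable; a small libsolv sketch of that ordering, with an example package name that is purely illustrative:

import solv

pool = solv.Pool()
repo = pool.add_repo('example')
# ... add solv files or solvables here ...
pool.createwhatprovides()   # build the provides index once all repos are loaded
sel = pool.select('glibc', solv.Selection.SELECTION_NAME)   # name lookups work from here on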
|