2017-08-29 18:20:58 +02:00
|
|
|
#!/usr/bin/python
|
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
# Copyright (c) 2017 SUSE LLC
|
|
|
|
#
|
|
|
|
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
|
|
# of this software and associated documentation files (the "Software"), to deal
|
|
|
|
# in the Software without restriction, including without limitation the rights
|
|
|
|
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
|
|
# copies of the Software, and to permit persons to whom the Software is
|
|
|
|
# furnished to do so, subject to the following conditions:
|
|
|
|
#
|
|
|
|
# The above copyright notice and this permission notice shall be included in
|
|
|
|
# all copies or substantial portions of the Software.
|
|
|
|
#
|
|
|
|
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
|
|
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
|
|
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
|
|
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
|
|
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
|
|
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
|
|
# SOFTWARE.
|
|
|
|
|
2017-09-04 15:39:52 +02:00
|
|
|
# TODO: implement equivalent of namespace namespace:language(de) @SYSTEM
|
|
|
|
# TODO: solve all devel packages to include
|
|
|
|
|
2017-08-29 18:20:58 +02:00
|
|
|
from lxml import etree as ET
|
|
|
|
from collections import namedtuple
|
|
|
|
import sys
|
|
|
|
import cmdln
|
|
|
|
import logging
|
|
|
|
import urllib2
|
|
|
|
import osc.core
|
|
|
|
import glob
|
|
|
|
import solv
|
|
|
|
from pprint import pprint, pformat
|
|
|
|
import os
|
2017-08-31 16:05:07 +02:00
|
|
|
import subprocess
|
2017-09-04 17:47:12 +02:00
|
|
|
import re
|
2017-09-06 10:48:47 +02:00
|
|
|
import yaml
|
2017-08-29 18:20:58 +02:00
|
|
|
|
|
|
|
import ToolBase
|
|
|
|
|
2017-09-04 13:28:39 +02:00
|
|
|
# share header cache with repochecker
|
|
|
|
from osclib.memoize import CACHEDIR
|
|
|
|
|
2017-08-29 18:20:58 +02:00
|
|
|
# Module-wide logger; handlers/level are presumably configured by
# ToolBase's command line handling -- TODO confirm.
logger = logging.getLogger()

# OBS project whose 'standard' repository supplies the packages to solve.
FACTORY = "SUSE:SLE-15:GA"
# All architectures that groups are solved for and repos mirrored for.
ARCHITECTURES = ('x86_64', 'ppc64le', 's390x', 'aarch64')
# Public OBS API endpoint used by do_update to mirror binary repositories.
APIURL = 'https://api.suse.de/public/'
|
2017-08-29 18:20:58 +02:00
|
|
|
|
2017-09-07 14:45:37 +02:00
|
|
|
|
2017-08-29 18:20:58 +02:00
|
|
|
class Group(object):
    """A named package group that can be dependency-solved per architecture.

    On construction the group registers itself in ``pkglist.groups``
    under a sanitized key (``safe_name``).  Explicitly listed package
    names live in ``packages`` keyed by architecture ('*' meaning all
    architectures).  After :meth:`solve` ran, ``solved_packages`` holds
    the fully resolved closure, again keyed by architecture with '*'
    holding the subset common to all of them.
    """

    def __init__(self, name, pkglist):
        self.name = name
        # identifier-safe variant of the name, used as dict key / attribute
        self.safe_name = re.sub(r'\W', '_', name.lower())
        self.pkglist = pkglist
        self.conditional = None
        # arch (or '*') -> set of explicitly listed package names
        self.packages = dict()
        # arch (or '*') -> set of names excluded from the solver run
        self.locked = dict()
        # arch (or '*') -> solved package names; filled by solve()
        self.solved_packages = None
        self.solved = False
        # list of base Group objects whose content is assumed installed
        self.base = None
        # arch (or '*') -> names that could not be found in the repo
        self.missing = None
        # source package names the solved set was built from
        self.srcpkgs = None

        pkglist.groups[self.safe_name] = self

    def _verify_solved(self):
        """Raise if solve() has not been run for this group yet."""
        if not self.solved:
            raise Exception('group {} not solved'.format(self.name))

    def merge_solved_group(self, group):
        """Merge another solved group's results into this one.

        Both groups must be solved.  Arch-specific sets are reduced by
        the arch-independent ('*') set afterwards to avoid duplicates.
        """
        group._verify_solved()
        self._verify_solved()

        for arch in group.solved_packages.keys():
            packages = self.solved_packages.get(arch, set())
            packages = group.solved_packages[arch] | packages
            self.solved_packages[arch] = packages

        # remove packages duplicated in the arch independent set
        generic_packages = self.solved_packages.get('*', set())
        for arch in ARCHITECTURES:
            arch_packages = self.solved_packages.get(arch, set())
            self.solved_packages[arch] = arch_packages - generic_packages
        # was a bare debug `print` to stdout; log at debug level instead
        logger.debug(pformat(self.solved_packages))

    def get_solved_packages_recursive(self, arch):
        """Return the solved closure for *arch*, including base groups."""
        self._verify_solved()

        solved = self.solved_packages.get('*', set())
        if arch in self.solved_packages:
            solved |= self.solved_packages[arch]
        logger.debug(
            "{}.{} have {} packages".format(self.name, arch, len(solved)))
        if self.base:
            for b in self.base:
                solved |= b.get_solved_packages_recursive(arch)

        return solved

    def get_packages_recursive(self, arch):
        """Return the explicitly listed packages for *arch*, including base groups."""
        packages = set()
        if '*' in self.packages:
            packages.update(self.packages['*'])
        if arch in self.packages:
            packages.update(self.packages[arch])
        logger.debug(
            "{}.{} have {} packages".format(self.name, arch, len(packages)))
        if self.base:
            for b in self.base:
                packages |= b.get_packages_recursive(arch)

        return packages

    def solve(self, base=None, extra=None, without=None, ignore_recommended=True):
        """Dependency-solve this group for every architecture.

        base: list of base groups (or a single Group, or None) whose
              packages are assumed present; their solved closure is
              subtracted from the result.
        extra: additional package names to feed the solver (subtracted
               from the result afterwards).
        without: package names to remove from the base set.
        ignore_recommended: do not pull in recommended packages.

        Fills solved_packages, missing and srcpkgs; marks the group solved.
        """

        if self.solved:
            return

        if isinstance(base, Group):
            base = [base]
        if not (base is None or isinstance(base, list) or isinstance(base, tuple)):
            raise Exception("base must be list but is {}".format(type(base)))
        if extra:
            if isinstance(extra, str):
                # bug fix: was set((extra)) which splits the string into
                # characters; wrap the single name like the without branch
                extra = set([extra])
            elif not (isinstance(extra, list) or isinstance(extra, tuple)):
                raise Exception(
                    "extra must be list but is {}".format(type(extra)))
            extra = set(extra)
        if without:
            if isinstance(without, str):
                without = set([without])
            elif not (isinstance(without, list) or isinstance(without, tuple)):
                raise Exception(
                    "without must be list but is {}".format(type(without)))
            without = set(without)

        solved = dict()
        missing = dict()
        srcpkgs = set()
        for arch in ARCHITECTURES:
            pool = self.pkglist._prepare_pool(arch)

            jobs = []
            # robustness: a group may have no arch-independent packages
            toinstall = set(self.packages.get('*', ()))
            locked = set(self.locked.get('*', ()))
            basepackages = set()
            basepackages_solved = set()
            logger.debug(
                "{}: {} common packages".format(self.name, len(toinstall)))
            if arch in self.packages:
                logger.debug(
                    "{}: {} {} packages".format(self.name, arch, len(self.packages[arch])))
                toinstall |= self.packages[arch]
            if arch in self.locked:
                locked |= self.locked[arch]
            if base:
                for b in base:
                    logger.debug(
                        "{} adding packges from {}".format(self.name, b.name))
                    basepackages |= b.get_packages_recursive(arch)
                    basepackages_solved |= b.get_solved_packages_recursive(
                        arch)
                self.base = list(base)
            if without:
                basepackages -= without
            toinstall |= basepackages
            if extra:
                toinstall.update(extra)
            for n in toinstall:
                sel = pool.select(str(n), solv.Selection.SELECTION_NAME)
                if sel.isempty():
                    logger.error(
                        '{}.{}: package {} not found'.format(self.name, arch, n))
                    missing.setdefault(arch, set()).add(n)
                else:
                    jobs += sel.jobs(solv.Job.SOLVER_INSTALL)

            for n in locked:
                sel = pool.select(str(n), solv.Selection.SELECTION_NAME)
                if sel.isempty():
                    logger.warn(
                        '{}.{}: locked package {} not found'.format(self.name, arch, n))
                else:
                    jobs += sel.jobs(solv.Job.SOLVER_LOCK)

            solver = pool.Solver()
            if ignore_recommended:
                solver.set_flag(solver.SOLVER_FLAG_IGNORE_RECOMMENDED, 1)

            problems = solver.solve(jobs)
            if problems:
                for problem in problems:
                    # just ignore conflicts here
                    # if not ' conflicts with ' in str(problem):
                    logger.error(
                        'unresolvable: %s.%s: %s', self.name, arch, problem)
                    # logger.warning(problem)
                # give up on the remaining architectures
                break

            trans = solver.transaction()
            if trans.isempty():
                logger.error('%s.%s: nothing to do', self.name, arch)
                break

            for s in trans.newsolvables():
                solved.setdefault(arch, set()).add(s.name)
                reason, rule = solver.describe_decision(s)
                #if rule:
                #    print(s.name, reason, rule.info().problemstr())
                # don't ask me why, but that's how it seems to work
                if s.lookup_void(solv.SOLVABLE_SOURCENAME):
                    src = s.name
                else:
                    src = s.lookup_str(solv.SOLVABLE_SOURCENAME)
                srcpkgs.add(src)

            if basepackages_solved:
                solved[arch] -= basepackages_solved

            if extra:
                solved[arch] -= extra

        common = None
        missing_common = None
        # compute common packages across all architectures
        for arch in solved.keys():
            if common is None:
                common = set(solved[arch])
                continue
            common &= solved[arch]

        if common is None:
            common = set()

        for arch in missing.keys():
            if missing_common is None:
                missing_common = set(missing[arch])
                continue
            missing_common &= missing[arch]

        # reduce arch specific sets by the common ones
        for arch in solved.keys():
            solved[arch] -= common

        for arch in missing.keys():
            missing[arch] -= missing_common

        self.missing = missing
        if missing_common:
            self.missing['*'] = missing_common

        self.solved_packages = solved
        self.solved_packages['*'] = common

        self.solved = True
        self.srcpkgs = srcpkgs

    def architectures(self):
        """Return the architecture keys present in the solved result."""
        return self.solved_packages.keys()

    def toxml(self, arch):
        """Build the <group> XML element for *arch* and return it."""

        packages = None
        autodeps = None

        if arch in self.solved_packages:
            # copy: the original in-place `-=` below would permanently
            # mutate self.solved_packages on every toxml() call
            autodeps = set(self.solved_packages[arch])

        if arch in self.packages:
            packages = self.packages[arch]
            if autodeps:
                autodeps -= self.packages[arch]

        name = self.name
        if arch != '*':
            name += '.' + arch

        root = ET.Element('group', {'name': name})
        c = ET.Comment(' ### AUTOMATICALLY GENERATED, DO NOT EDIT ### ')
        root.append(c)

        if self.base:
            c = ET.Comment(' based on {} '.format(', '.join([b.name for b in self.base])))
            root.append(c)

        if arch != '*':
            cond = ET.SubElement(root, 'conditional', {'name': 'only_{}'.format(arch)})
        packagelist = ET.SubElement(root, 'packagelist', {'relationship': 'recommends'})

        if packages:
            for name in sorted(packages):
                # guard self.missing: it stays None for groups marked
                # solved externally (e.g. the devel/unsorted collections)
                if self.missing and arch in self.missing and name in self.missing[arch]:
                    c = ET.Comment(' missing {} '.format(name))
                    packagelist.append(c)
                else:
                    status = self.pkglist.supportstatus(name)
                    if status:
                        p = ET.SubElement(packagelist, 'package', {
                            'name': name,
                            'supportstatus': status
                        })

        if autodeps:
            c = ET.Comment(' automatic dependencies ')
            packagelist.append(c)

            for name in sorted(autodeps):
                status = self.pkglist.supportstatus(name)
                if status:
                    p = ET.SubElement(packagelist, 'package', {
                        'name': name,
                        'supportstatus': self.pkglist.supportstatus(name)
                    })

        return root

    def dump(self):
        """Print the XML of all architectures to stdout (debug helper)."""
        archs = ('*',) + ARCHITECTURES
        for arch in archs:
            x = self.toxml(arch)
            print(ET.tostring(x, pretty_print=True))
|
|
|
|
|
2017-08-29 18:20:58 +02:00
|
|
|
|
|
|
|
class PkgListGen(ToolBase.ToolBase):
|
|
|
|
|
|
|
|
def __init__(self, project):
|
|
|
|
ToolBase.ToolBase.__init__(self)
|
|
|
|
self.project = project
|
|
|
|
# package -> supportatus
|
|
|
|
self.packages = dict()
|
|
|
|
self.default_support_status = 'l3'
|
|
|
|
self.groups = dict()
|
|
|
|
self._supportstatus = None
|
2017-09-04 13:28:39 +02:00
|
|
|
self.input_dir = '.'
|
|
|
|
self.output_dir = '.'
|
2017-08-29 18:20:58 +02:00
|
|
|
|
2017-08-31 16:05:07 +02:00
|
|
|
def _dump_supportstatus(self):
|
|
|
|
for name in self.packages.keys():
|
|
|
|
for status in self.packages[name]:
|
|
|
|
if status == self.default_support_status:
|
|
|
|
continue
|
|
|
|
for group in self.packages[name][status]:
|
2017-09-04 17:47:12 +02:00
|
|
|
print name, status
|
2017-08-31 16:05:07 +02:00
|
|
|
|
2017-08-29 18:20:58 +02:00
|
|
|
def _load_supportstatus(self):
|
|
|
|
# XXX
|
2017-09-04 13:28:39 +02:00
|
|
|
with open(os.path.join(self.input_dir, 'supportstatus.txt'), 'r') as fh:
|
2017-08-29 18:20:58 +02:00
|
|
|
self._supportstatus = dict()
|
2017-09-04 15:39:52 +02:00
|
|
|
for l in fh:
|
2017-09-04 17:47:12 +02:00
|
|
|
# pkg, status
|
2017-09-04 15:39:52 +02:00
|
|
|
a = l.rstrip().split(' ')
|
2017-09-04 17:47:12 +02:00
|
|
|
if len(a) > 1:
|
2017-09-06 10:48:47 +02:00
|
|
|
self._supportstatus[a[0]] = a[1]
|
2017-08-29 18:20:58 +02:00
|
|
|
|
|
|
|
# TODO: make per product
|
|
|
|
def supportstatus(self, package):
|
|
|
|
if self._supportstatus is None:
|
|
|
|
self._load_supportstatus()
|
|
|
|
|
|
|
|
if package in self._supportstatus:
|
|
|
|
return self._supportstatus[package]
|
|
|
|
else:
|
|
|
|
return self.default_support_status
|
|
|
|
|
|
|
|
# XXX: move to group class. just here to check supportstatus
|
2017-09-06 10:48:47 +02:00
|
|
|
def _parse_group(self, groupname, packages):
|
2017-08-29 18:20:58 +02:00
|
|
|
group = Group(groupname, self)
|
2017-09-06 10:48:47 +02:00
|
|
|
for package in packages:
|
|
|
|
if isinstance(package, dict):
|
|
|
|
name = package.keys()[0]
|
|
|
|
for rel in package[name]:
|
|
|
|
if rel == 'locks':
|
|
|
|
group.locked.setdefault('*', set()).add(name)
|
|
|
|
else:
|
|
|
|
group.packages.setdefault(rel, set()).add(name)
|
|
|
|
else:
|
|
|
|
group.packages.setdefault('*', set()).add(package)
|
|
|
|
|
2017-08-29 18:20:58 +02:00
|
|
|
return group
|
|
|
|
|
|
|
|
def _load_group_file(self, fn):
|
|
|
|
with open(fn, 'r') as fh:
|
|
|
|
logger.debug("reading %s", fn)
|
2017-09-06 10:48:47 +02:00
|
|
|
for groupname, group in yaml.safe_load(fh).items():
|
|
|
|
g = self._parse_group(groupname, group)
|
2017-08-29 18:20:58 +02:00
|
|
|
|
|
|
|
def load_all_groups(self):
|
2017-09-06 10:48:47 +02:00
|
|
|
for fn in glob.glob(os.path.join(self.input_dir, 'group*.yml')):
|
2017-08-29 18:20:58 +02:00
|
|
|
self._load_group_file(fn)
|
|
|
|
|
|
|
|
def _write_all_groups(self):
|
2017-09-04 14:06:15 +02:00
|
|
|
self._check_supplements()
|
2017-09-06 20:20:21 +02:00
|
|
|
archs = ('*',) + ARCHITECTURES
|
2017-08-29 18:20:58 +02:00
|
|
|
for name in self.groups:
|
|
|
|
group = self.groups[name]
|
2017-09-04 17:47:12 +02:00
|
|
|
fn = '{}.group'.format(group.name)
|
2017-08-29 18:20:58 +02:00
|
|
|
if not group.solved:
|
|
|
|
logger.error('{} not solved'.format(name))
|
2017-09-04 13:28:39 +02:00
|
|
|
if os.path.exists(fn):
|
|
|
|
os.unlink(fn)
|
2017-08-29 18:20:58 +02:00
|
|
|
continue
|
2017-09-04 13:28:39 +02:00
|
|
|
with open(os.path.join(self.output_dir, fn), 'w') as fh:
|
2017-09-06 20:20:21 +02:00
|
|
|
for arch in archs:
|
2017-09-04 13:28:39 +02:00
|
|
|
x = group.toxml(arch)
|
2017-09-07 14:45:37 +02:00
|
|
|
# fh.write(ET.tostring(x, pretty_print = True, doctype = '<?xml version="1.0" encoding="UTF-8"?>'))
|
|
|
|
fh.write(ET.tostring(x, pretty_print=True))
|
2017-08-29 18:20:58 +02:00
|
|
|
|
|
|
|
def _parse_product(self, root):
|
|
|
|
print(root.find('.//products/product/name').text)
|
|
|
|
for mnode in root.findall(".//mediasets/media"):
|
|
|
|
name = mnode.get('name')
|
|
|
|
print(' {}'.format(name))
|
|
|
|
for node in mnode.findall(".//use"):
|
|
|
|
print(' {}'.format(node.get('group')))
|
|
|
|
|
|
|
|
def list_products(self):
|
|
|
|
for fn in glob.glob('*.product'):
|
2017-09-04 13:28:39 +02:00
|
|
|
with open(os.path.join(self.input_dir, fn), 'r') as fh:
|
2017-08-29 18:20:58 +02:00
|
|
|
logger.debug("reading %s", fn)
|
|
|
|
root = ET.parse(fh).getroot()
|
|
|
|
self._parse_product(root)
|
|
|
|
|
|
|
|
def solve_group(self, name):
|
|
|
|
self._load_all_groups()
|
|
|
|
group = self.groups[name]
|
|
|
|
group.solve()
|
|
|
|
return group
|
2017-09-04 14:06:15 +02:00
|
|
|
|
|
|
|
def _check_supplements(self):
|
|
|
|
tocheck = set()
|
|
|
|
for arch in ARCHITECTURES:
|
|
|
|
pool = self._prepare_pool(arch)
|
|
|
|
sel = pool.Selection()
|
|
|
|
for s in pool.solvables_iter():
|
|
|
|
sel.add_raw(solv.Job.SOLVER_SOLVABLE, s.id)
|
|
|
|
|
|
|
|
for s in sel.solvables():
|
|
|
|
for dep in s.lookup_deparray(solv.SOLVABLE_SUPPLEMENTS):
|
|
|
|
for d in dep.str().split(' '):
|
|
|
|
if d.startswith('namespace:modalias') or d.startswith('namespace:filesystem'):
|
|
|
|
tocheck.add(s.name)
|
|
|
|
|
|
|
|
all_grouped = set()
|
|
|
|
for g in self.groups.values():
|
|
|
|
if g.solved:
|
|
|
|
for arch in g.solved_packages.keys():
|
2017-09-04 15:39:52 +02:00
|
|
|
if g.solved_packages[arch]:
|
|
|
|
all_grouped.update(g.solved_packages[arch])
|
2017-09-04 14:06:15 +02:00
|
|
|
|
|
|
|
for p in tocheck - all_grouped:
|
|
|
|
logger.warn('package %s has supplements but is not grouped', p)
|
|
|
|
|
|
|
|
def _prepare_pool(self, arch):
|
|
|
|
pool = solv.Pool()
|
|
|
|
pool.setarch(arch)
|
|
|
|
|
|
|
|
# XXX
|
|
|
|
repo = pool.add_repo(FACTORY)
|
|
|
|
r = repo.add_solv(os.path.join(CACHEDIR, 'repo-{}-standard-{}.solv'.format(FACTORY, arch)))
|
|
|
|
if not r:
|
|
|
|
raise Exception("failed to add repo. Need to run update first?")
|
|
|
|
|
|
|
|
pool.addfileprovides()
|
|
|
|
pool.createwhatprovides()
|
|
|
|
|
|
|
|
return pool
|
2017-09-04 15:39:52 +02:00
|
|
|
|
|
|
|
def _collect_devel_packages(self):
|
|
|
|
srcpkgs = set()
|
|
|
|
for g in self.groups.values():
|
|
|
|
if g.srcpkgs:
|
|
|
|
srcpkgs.update(g.srcpkgs)
|
|
|
|
|
|
|
|
develpkgs = dict()
|
|
|
|
for arch in ARCHITECTURES:
|
|
|
|
pool = self._prepare_pool(arch)
|
|
|
|
sel = pool.Selection()
|
|
|
|
for s in pool.solvables_iter():
|
|
|
|
if s.name.endswith('-devel'):
|
|
|
|
# don't ask me why, but that's how it seems to work
|
|
|
|
if s.lookup_void(solv.SOLVABLE_SOURCENAME):
|
|
|
|
src = s.name
|
|
|
|
else:
|
|
|
|
src = s.lookup_str(solv.SOLVABLE_SOURCENAME)
|
|
|
|
|
|
|
|
if src in srcpkgs:
|
|
|
|
develpkgs.setdefault(arch, set()).add(s.name)
|
|
|
|
|
|
|
|
common = None
|
|
|
|
# compute common packages across all architectures
|
|
|
|
for arch in develpkgs.keys():
|
|
|
|
if common is None:
|
|
|
|
common = set(develpkgs[arch])
|
|
|
|
continue
|
|
|
|
common &= develpkgs[arch]
|
|
|
|
|
|
|
|
# reduce arch specific set by common ones
|
|
|
|
for arch in develpkgs.keys():
|
|
|
|
develpkgs[arch] -= common
|
|
|
|
|
|
|
|
develpkgs['*'] = common
|
|
|
|
|
|
|
|
g = Group('all-devel-pkgs', self)
|
|
|
|
# XXX: would need to add to packages instead, then solve and
|
|
|
|
# subtract all other groups
|
|
|
|
g.solved_packages = develpkgs
|
|
|
|
g.solved = True
|
|
|
|
|
2017-09-04 17:18:56 +02:00
|
|
|
def _collect_unsorted_packages(self):
|
|
|
|
|
|
|
|
packages = dict()
|
|
|
|
for arch in ARCHITECTURES:
|
|
|
|
pool = self._prepare_pool(arch)
|
|
|
|
sel = pool.Selection()
|
|
|
|
p = set([s.name for s in
|
2017-09-07 14:45:37 +02:00
|
|
|
pool.solvables_iter() if not
|
|
|
|
(s.name.endswith('-debuginfo') or
|
|
|
|
s.name.endswith('-debugsource'))])
|
2017-09-04 17:18:56 +02:00
|
|
|
|
|
|
|
for g in self.groups.values():
|
|
|
|
if g.solved:
|
|
|
|
for a in ('*', arch):
|
|
|
|
if a in g.solved_packages:
|
|
|
|
p -= g.solved_packages[a]
|
|
|
|
packages[arch] = p
|
|
|
|
|
|
|
|
common = None
|
|
|
|
# compute common packages across all architectures
|
|
|
|
for arch in packages.keys():
|
|
|
|
if common is None:
|
|
|
|
common = set(packages[arch])
|
|
|
|
continue
|
|
|
|
common &= packages[arch]
|
|
|
|
|
|
|
|
# reduce arch specific set by common ones
|
|
|
|
for arch in packages.keys():
|
|
|
|
packages[arch] -= common
|
|
|
|
|
|
|
|
packages['*'] = common
|
|
|
|
|
|
|
|
g = Group('unsorted', self)
|
|
|
|
g.solved_packages = packages
|
|
|
|
g.solved = True
|
|
|
|
|
2017-09-06 10:48:47 +02:00
|
|
|
|
2017-08-29 18:20:58 +02:00
|
|
|
class CommandLineInterface(ToolBase.CommandLineInterface):
|
|
|
|
|
|
|
|
def __init__(self, *args, **kwargs):
|
|
|
|
ToolBase.CommandLineInterface.__init__(self, args, kwargs)
|
|
|
|
|
|
|
|
def get_optparser(self):
|
|
|
|
parser = ToolBase.CommandLineInterface.get_optparser(self)
|
|
|
|
parser.add_option('-p', '--project', dest='project', metavar='PROJECT',
|
2017-09-07 14:45:37 +02:00
|
|
|
help='project to process (default: %s)' % FACTORY,
|
|
|
|
default=FACTORY)
|
2017-09-04 13:28:39 +02:00
|
|
|
parser.add_option('-i', '--input-dir', dest='input_dir', metavar='DIR',
|
2017-09-07 14:45:37 +02:00
|
|
|
help='input directory', default='.')
|
2017-09-04 13:28:39 +02:00
|
|
|
parser.add_option('-o', '--output-dir', dest='output_dir', metavar='DIR',
|
2017-09-07 14:45:37 +02:00
|
|
|
help='input directory', default='.')
|
2017-08-29 18:20:58 +02:00
|
|
|
return parser
|
|
|
|
|
|
|
|
def setup_tool(self):
|
|
|
|
tool = PkgListGen(self.options.project)
|
2017-09-04 13:28:39 +02:00
|
|
|
tool.input_dir = self.options.input_dir
|
|
|
|
tool.output_dir = self.options.output_dir
|
2017-08-29 18:20:58 +02:00
|
|
|
return tool
|
|
|
|
|
|
|
|
def do_list(self, subcmd, opts):
|
|
|
|
"""${cmd_name}: list all groups
|
|
|
|
|
|
|
|
${cmd_usage}
|
|
|
|
${cmd_option_list}
|
|
|
|
"""
|
|
|
|
|
|
|
|
self.tool.load_all_groups()
|
|
|
|
|
|
|
|
for name in sorted(self.tool.groups.keys()):
|
|
|
|
print name
|
|
|
|
|
2017-08-31 16:05:07 +02:00
|
|
|
# to be called only once to bootstrap
|
|
|
|
def do_dump_supportstatus(self, subcmd, opts):
|
|
|
|
"""${cmd_name}: dump supportstatus of input files
|
|
|
|
|
|
|
|
${cmd_usage}
|
|
|
|
${cmd_option_list}
|
|
|
|
"""
|
|
|
|
|
|
|
|
self.tool.load_all_groups()
|
|
|
|
self.tool._dump_supportstatus()
|
|
|
|
|
2017-08-29 18:20:58 +02:00
|
|
|
def do_list_products(self, subcmd, opts):
|
|
|
|
"""${cmd_name}: list all products
|
|
|
|
|
|
|
|
${cmd_usage}
|
|
|
|
${cmd_option_list}
|
|
|
|
"""
|
|
|
|
|
|
|
|
self.tool.list_products()
|
|
|
|
|
2017-08-31 16:05:07 +02:00
|
|
|
def do_update(self, subcmd, opts):
|
|
|
|
"""${cmd_name}: Solve groups
|
2017-08-29 18:20:58 +02:00
|
|
|
|
|
|
|
${cmd_usage}
|
|
|
|
${cmd_option_list}
|
|
|
|
"""
|
|
|
|
|
2017-08-31 16:05:07 +02:00
|
|
|
bs_mirrorfull = os.path.join(os.path.dirname(__file__), 'bs_mirrorfull')
|
|
|
|
repo = 'standard'
|
|
|
|
project = FACTORY
|
|
|
|
for arch in ARCHITECTURES:
|
2017-09-04 13:28:39 +02:00
|
|
|
d = os.path.join(CACHEDIR, 'repo-{}-{}-{}'.format(project, repo, arch))
|
2017-08-31 16:05:07 +02:00
|
|
|
logger.debug('updating %s', d)
|
2017-09-07 14:45:37 +02:00
|
|
|
subprocess.call(
|
|
|
|
[bs_mirrorfull, '{}/build/{}/{}/{}'.format(APIURL, project, repo, arch), d])
|
|
|
|
files = [os.path.join(d, f) for f in os.listdir(d) if f.endswith('.rpm')]
|
|
|
|
fh = open(d + '.solv', 'w')
|
|
|
|
p = subprocess.Popen(['rpms2solv', '-m', '-', '-0'], stdin=subprocess.PIPE, stdout=fh)
|
2017-08-31 16:05:07 +02:00
|
|
|
p.communicate('\0'.join(files))
|
|
|
|
p.wait()
|
|
|
|
fh.close()
|
|
|
|
|
|
|
|
def do_solve(self, subcmd, opts):
|
|
|
|
"""${cmd_name}: Solve groups
|
2017-08-29 18:20:58 +02:00
|
|
|
|
|
|
|
${cmd_usage}
|
|
|
|
${cmd_option_list}
|
|
|
|
"""
|
|
|
|
|
|
|
|
self.tool.load_all_groups()
|
|
|
|
|
2017-09-04 17:47:12 +02:00
|
|
|
self._solve()
|
2017-08-31 17:09:50 +02:00
|
|
|
|
2017-09-04 17:47:12 +02:00
|
|
|
# sle_base.dump()
|
2017-09-01 16:08:47 +02:00
|
|
|
|
2017-09-04 17:47:12 +02:00
|
|
|
self.tool._collect_devel_packages()
|
|
|
|
self.tool._collect_unsorted_packages()
|
|
|
|
self.tool._write_all_groups()
|
2017-09-01 16:08:47 +02:00
|
|
|
|
2017-09-04 17:47:12 +02:00
|
|
|
def _solve(self):
|
|
|
|
""" imlement this"""
|
2017-09-01 15:34:07 +02:00
|
|
|
|
2017-09-04 17:47:12 +02:00
|
|
|
class G(object):
|
|
|
|
True
|
2017-08-31 17:09:50 +02:00
|
|
|
|
2017-09-04 17:47:12 +02:00
|
|
|
g = G()
|
2017-08-31 17:09:50 +02:00
|
|
|
|
2017-09-04 17:47:12 +02:00
|
|
|
for group in self.tool.groups.values():
|
|
|
|
setattr(g, group.safe_name, group)
|
2017-08-29 18:20:58 +02:00
|
|
|
|
2017-09-04 17:47:12 +02:00
|
|
|
raise Exception('implement me in subclass')
|
2017-08-29 18:20:58 +02:00
|
|
|
|
|
|
|
if __name__ == "__main__":
    app = CommandLineInterface()
    # app.main() returns the process exit code to hand back to the shell
    sys.exit(app.main())
|
2017-08-29 18:20:58 +02:00
|
|
|
|
|
|
|
# vim: sw=4 et
|