2017-08-29 18:20:58 +02:00
|
|
|
#!/usr/bin/python
|
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
# Copyright (c) 2017 SUSE LLC
|
|
|
|
#
|
|
|
|
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
|
|
# of this software and associated documentation files (the "Software"), to deal
|
|
|
|
# in the Software without restriction, including without limitation the rights
|
|
|
|
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
|
|
# copies of the Software, and to permit persons to whom the Software is
|
|
|
|
# furnished to do so, subject to the following conditions:
|
|
|
|
#
|
|
|
|
# The above copyright notice and this permission notice shall be included in
|
|
|
|
# all copies or substantial portions of the Software.
|
|
|
|
#
|
|
|
|
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
|
|
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
|
|
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
|
|
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
|
|
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
|
|
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
|
|
# SOFTWARE.
|
|
|
|
|
2017-09-04 15:39:52 +02:00
|
|
|
# TODO: implement equivalent of namespace namespace:language(de) @SYSTEM
|
|
|
|
# TODO: solve all devel packages to include
|
|
|
|
|
2017-08-29 18:20:58 +02:00
|
|
|
from lxml import etree as ET
|
|
|
|
from collections import namedtuple
|
|
|
|
import sys
|
|
|
|
import cmdln
|
|
|
|
import logging
|
|
|
|
import urllib2
|
|
|
|
import osc.core
|
|
|
|
import glob
|
|
|
|
import solv
|
|
|
|
from pprint import pprint, pformat
|
|
|
|
import os
|
2017-08-31 16:05:07 +02:00
|
|
|
import subprocess
|
2017-08-29 18:20:58 +02:00
|
|
|
|
|
|
|
import ToolBase
|
|
|
|
|
2017-09-04 13:28:39 +02:00
|
|
|
# share header cache with repochecker
|
|
|
|
from osclib.memoize import CACHEDIR
|
|
|
|
|
2017-08-29 18:20:58 +02:00
|
|
|
# module-wide root logger; verbosity is configured by the ToolBase CLI
logger = logging.getLogger()

# OBS project whose 'standard' repository feeds the solver pools
FACTORY = "SUSE:SLE-15:GA"
# every group is solved for each of these architectures
ARCHITECTURES = ('x86_64', 'ppc64le', 's390x')
# public OBS API endpoint used by 'update' to mirror repositories
APIURL = 'https://api.suse.de/public/'
|
2017-08-29 18:20:58 +02:00
|
|
|
|
|
|
|
class Group(object):
    """A named group of packages plus its dependency-solver results.

    Packages, locks, solved packages and missing packages are all tracked
    per architecture, with the pseudo-architecture '*' holding entries
    common to every architecture.  A Group registers itself in its owning
    PkgListGen under its name.
    """

    def __init__(self, name, pkglist):
        self.name = name
        self.pkglist = pkglist
        self.conditional = None
        # arch (or '*') -> set of explicitly listed package names
        self.packages = dict()
        # arch (or '*') -> set of package names locked from installation
        self.locked = dict()
        # arch (or '*') -> set of package names after solving; None until solved
        self.solved_packages = None
        self.solved = False
        # list of base Groups whose solved packages are subtracted from ours
        self.base = None
        # arch (or '*') -> set of packages that could not be found in the pool
        self.missing = None
        # source package names of everything pulled in by the last solve()
        self.srcpkgs = None

        # register with the owning package list generator
        pkglist.groups[name] = self

    def get_solved_packages_recursive(self, arch):
        """Return the solved package set for arch, including all base groups.

        Raises Exception if this group has not been solved yet.
        """
        if not self.solved:
            raise Exception('group {} not solved'.format(self.name))

        solved = self.solved_packages['*'] | self.solved_packages[arch]
        logger.debug("{}.{} have {} packages".format(self.name, arch, len(solved)))
        if self.base:
            for b in self.base:
                solved |= b.get_solved_packages_recursive(arch)

        return solved

    def get_packages_recursive(self, arch):
        """Return the explicitly listed packages for arch, including base groups."""
        packages = set()
        if '*' in self.packages:
            packages.update(self.packages['*'])
        if arch in self.packages:
            packages.update(self.packages[arch])
        logger.debug("{}.{} have {} packages".format(self.name, arch, len(packages)))
        if self.base:
            for b in self.base:
                packages |= b.get_packages_recursive(arch)

        return packages

    def solve(self, base = None, extra = None, without = None):
        """Dependency-solve this group's packages for all architectures.

        base: Group, list/tuple of Groups, or None.  Their packages are fed
            to the solver and their solved sets subtracted from the result.
        extra: str or list/tuple of additional package names to install but
            exclude from the recorded result.
        without: str or list/tuple of package names removed from the
            inherited base packages.

        Raises Exception for bad argument types, unresolvable dependencies
        or an empty transaction.  No-op if already solved.
        """

        if self.solved:
            return

        if isinstance(base, Group):
            base = [ base ]
        if not (base is None or isinstance(base, list) or isinstance(base, tuple)):
            raise Exception("base must be list but is {}".format(type(base)))
        if extra:
            if isinstance(extra, str):
                # BUGFIX: was set((extra)), which splits a string into its
                # characters; wrap in a list like the 'without' case below
                extra = set([extra])
            elif not (isinstance(extra, list) or isinstance(extra, tuple)):
                raise Exception("extra must be list but is {}".format(type(extra)))
            extra = set(extra)
        if without:
            if isinstance(without, str):
                without = set([without])
            elif not (isinstance(without, list) or isinstance(without, tuple)):
                raise Exception("without must be list but is {}".format(type(without)))
            without = set(without)

        solved = dict()
        missing = dict()
        srcpkgs = set()
        for arch in ARCHITECTURES:
            pool = self.pkglist._prepare_pool(arch)

            jobs = []
            toinstall = set(self.packages['*'])
            locked = set(self.locked.get('*', ()))
            basepackages = set()
            basepackages_solved = set()
            logger.debug("{}: {} common packages".format(self.name, len(toinstall)))
            if arch in self.packages:
                logger.debug("{}: {} {} packages".format(self.name, arch, len(self.packages[arch])))
                toinstall |= self.packages[arch]
            if arch in self.locked:
                locked |= self.locked[arch]
            if base:
                for b in base:
                    logger.debug("{} adding packages from {}".format(self.name, b.name))
                    basepackages |= b.get_packages_recursive(arch)
                    basepackages_solved |= b.get_solved_packages_recursive(arch)
            if without:
                basepackages -= without
            toinstall |= basepackages
            if extra:
                toinstall.update(extra)

            # build install jobs; record names the pool doesn't know
            for n in toinstall:
                sel = pool.select(str(n), solv.Selection.SELECTION_NAME)
                if sel.isempty():
                    logger.error('{}.{}: package {} not found'.format(self.name, arch, n))
                    missing.setdefault(arch, set()).add(n)
                else:
                    jobs += sel.jobs(solv.Job.SOLVER_INSTALL)

            for n in locked:
                sel = pool.select(str(n), solv.Selection.SELECTION_NAME)
                if sel.isempty():
                    logger.warn('{}.{}: locked package {} not found'.format(self.name, arch, n))
                else:
                    jobs += sel.jobs(solv.Job.SOLVER_LOCK)

            solver = pool.Solver()
            problems = solver.solve(jobs)
            if problems:
                # report every problem before giving up (previously the
                # loop raised after logging only the first one)
                for problem in problems:
                    logger.error('%s.%s: %s', self.name, arch, problem)
                raise Exception('unresolvable')

            trans = solver.transaction()
            if trans.isempty():
                raise Exception('nothing to do')

            for s in trans.newsolvables():
                solved.setdefault(arch, set()).add(s.name)
                # don't ask me why, but that's how it seems to work
                if s.lookup_void(solv.SOLVABLE_SOURCENAME):
                    src = s.name
                else:
                    src = s.lookup_str(solv.SOLVABLE_SOURCENAME)
                srcpkgs.add(src)

            if basepackages_solved:
                self.base = list(base)
                solved[arch] -= basepackages_solved

            if extra:
                solved[arch] -= extra

        common = None
        missing_common = None
        # compute common packages across all architectures
        for arch in solved.keys():
            if common is None:
                common = set(solved[arch])
                continue
            common &= solved[arch]

        for arch in missing.keys():
            if missing_common is None:
                missing_common = set(missing[arch])
                continue
            missing_common &= missing[arch]

        # reduce arch specific set by common ones
        for arch in solved.keys():
            solved[arch] -= common

        for arch in missing.keys():
            missing[arch] -= missing_common

        self.missing = missing
        if missing_common:
            self.missing['*'] = missing_common

        self.solved_packages = solved
        self.solved_packages['*'] = common

        self.solved = True
        self.srcpkgs = srcpkgs

    def architectures(self):
        """Return the architectures (including '*') present in the solved result."""
        return self.solved_packages.keys()

    def toxml(self, arch):
        """Return the <group> XML element for arch, or None if it would be empty."""

        packages = None
        autodeps = None

        if arch in self.solved_packages:
            # BUGFIX: copy the set -- the in-place '-=' below used to
            # mutate self.solved_packages[arch] permanently
            autodeps = set(self.solved_packages[arch])

        if arch in self.packages:
            packages = self.packages[arch]
            if autodeps:
                autodeps -= self.packages[arch]

        if not packages and not autodeps:
            return None

        name = self.name
        if arch != '*':
            name += '.' + arch

        root = ET.Element('group', { 'name' : name})
        c = ET.Comment(' ### AUTOMATICALLY GENERATED, DO NOT EDIT ### ')
        root.append(c)

        if self.base:
            c = ET.Comment(' based on {} '.format(', '.join([b.name for b in self.base])))
            root.append(c)

        if arch != '*':
            cond = ET.SubElement(root, 'conditional', {'name': 'only_{}'.format(arch)})
        packagelist = ET.SubElement(root, 'packagelist', {'relationship': 'recommends'})

        if packages:
            for name in sorted(packages):
                if arch in self.missing and name in self.missing[arch]:
                    c = ET.Comment(' missing {} '.format(name))
                    packagelist.append(c)
                else:
                    status = self.pkglist.supportstatus(name)
                    if status:
                        p = ET.SubElement(packagelist, 'package', {
                            'name' : name,
                            'supportstatus' : status
                            })

        if autodeps:
            c = ET.Comment(' automatic dependencies ')
            packagelist.append(c)

            for name in sorted(autodeps):
                status = self.pkglist.supportstatus(name)
                if status:
                    # reuse the status computed above instead of a second lookup
                    p = ET.SubElement(packagelist, 'package', {
                        'name' : name,
                        'supportstatus' : status
                        })

        return root

    def dump(self):
        """Print the XML for every solved architecture of this group."""
        for arch in sorted(self.architectures()):
            x = self.toxml(arch)
            if x is not None:
                print(ET.tostring(x, pretty_print = True))
|
|
|
|
|
class PkgListGen(ToolBase.ToolBase):
    """Reads group definitions, solves their dependencies and writes
    .group XML files.

    Groups register themselves in self.groups when constructed.  Input
    files are read from input_dir, results are written to output_dir.
    """

    def __init__(self, project):
        ToolBase.ToolBase.__init__(self)
        self.project = project
        # package name -> { supportstatus -> set of group names }
        self.packages = dict()
        self.default_support_status = 'l3'
        # group name -> Group
        self.groups = dict()
        # package name -> support status; lazily loaded from supportstatus.txt
        self._supportstatus = None
        self.input_dir = '.'
        self.output_dir = '.'

    def _dump_supportstatus(self):
        """Print 'group package status' for every non-default support status."""
        for name in self.packages.keys():
            for status in self.packages[name]:
                if status == self.default_support_status:
                    continue
                for group in self.packages[name][status]:
                    # print() function form: same output on python 2 and 3
                    print('{} {} {}'.format(group, name, status))

    def _load_supportstatus(self):
        """Populate self._supportstatus from input_dir/supportstatus.txt."""
        # XXX
        with open(os.path.join(self.input_dir, 'supportstatus.txt'), 'r') as fh:
            self._supportstatus = dict()
            for line in fh:
                # line format: group pkg status
                a = line.rstrip().split(' ')
                if len(a) > 2:
                    self._supportstatus[a[1]] = a[2]

    # TODO: make per product
    def supportstatus(self, package):
        """Return the support status for package, loading the map on first use."""
        if self._supportstatus is None:
            self._load_supportstatus()

        if package in self._supportstatus:
            return self._supportstatus[package]
        else:
            return self.default_support_status

    # XXX: move to group class. just here to check supportstatus
    def _parse_group(self, root):
        """Build a Group from a <group> XML element and return it."""
        groupname = root.get('name')
        group = Group(groupname, self)
        for packagelist in root.findall(".//packagelist"):
            rel = packagelist.get('relationship', 'requires')

            for node in packagelist.findall(".//package"):
                name = node.get('name')
                arch = node.get('arch', '*')
                status = node.get('supportstatus', '')
                logger.debug('group %s %s package %s, status %s', groupname, rel, name, status)
                if rel in ('recommends', 'requires'):
                    self.packages.setdefault(name, dict())
                    self.packages[name].setdefault(status, set()).add(groupname)
                    if len(self.packages[name]) > 1:
                        logger.error("multiple supports states for {}: {}".format(name, pformat(self.packages[name])))
                    group.packages.setdefault(arch, set()).add(name)
                elif rel == 'locks':
                    group.locked.setdefault(arch, set()).add(name)
                else:
                    raise Exception('{}: unhandled relation {}'.format(groupname, rel))
        return group

    def _load_group_file(self, fn):
        """Parse one *.group.in file (a single <group> root element)."""
        with open(fn, 'r') as fh:
            logger.debug("reading %s", fn)
            root = ET.parse(fh).getroot()
            g = self._parse_group(root)

    def _load_groups_file(self, fn):
        """Parse one *.groups.in file (multiple <group> elements, no root)."""
        with open(fn, 'r') as fh:
            logger.debug("reading %s", fn)
            # the file has several top-level <group> elements; wrap them
            # so the XML parser accepts a single document
            xml = '<groups>' + ''.join(fh.readlines()) + '</groups>'
            root = ET.fromstring(xml)
            for groupnode in root.findall("./group"):
                g = self._parse_group(groupnode)

    def load_all_groups(self):
        """Load every *.group.in and *.groups.in file from input_dir."""
        for fn in glob.glob(os.path.join(self.input_dir, '*.group.in')):
            self._load_group_file(fn)
        for fn in glob.glob(os.path.join(self.input_dir, '*.groups.in')):
            self._load_groups_file(fn)

    def _write_all_groups(self):
        """Write one <name>.group file per solved group into output_dir.

        Unsolved groups are reported and any stale output file for them
        is removed.
        """
        self._check_supplements()
        for name in self.groups:
            group = self.groups[name]
            # BUGFIX: stale-file check used the bare filename (cwd) while
            # the file is written into output_dir; use one path for both
            fn = os.path.join(self.output_dir, '{}.group'.format(name))
            if not group.solved:
                logger.error('{} not solved'.format(name))
                if os.path.exists(fn):
                    os.unlink(fn)
                continue
            with open(fn, 'w') as fh:
                for arch in sorted(group.architectures()):
                    x = group.toxml(arch)
                    if x is not None:
                        fh.write(ET.tostring(x, pretty_print = True))

    def _parse_product(self, root):
        """Print the product name, its media sets and the groups they use."""
        print(root.find('.//products/product/name').text)
        for mnode in root.findall(".//mediasets/media"):
            name = mnode.get('name')
            print(' {}'.format(name))
            for node in mnode.findall(".//use"):
                print(' {}'.format(node.get('group')))

    def list_products(self):
        """Parse and print every *.product file found in input_dir."""
        # BUGFIX: glob in input_dir -- previously the cwd was globbed but
        # the result was opened relative to input_dir, which broke for
        # any input_dir other than '.'
        for fn in glob.glob(os.path.join(self.input_dir, '*.product')):
            with open(fn, 'r') as fh:
                logger.debug("reading %s", fn)
                root = ET.parse(fh).getroot()
                self._parse_product(root)

    def solve_group(self, name):
        """Load all groups, solve the named one and return it."""
        # BUGFIX: was self._load_all_groups(), which does not exist
        self.load_all_groups()
        group = self.groups[name]
        group.solve()
        return group

    def _check_supplements(self):
        """Warn about packages with modalias/filesystem supplements that
        ended up in no solved group."""
        tocheck = set()
        for arch in ARCHITECTURES:
            pool = self._prepare_pool(arch)
            sel = pool.Selection()
            for s in pool.solvables_iter():
                sel.add_raw(solv.Job.SOLVER_SOLVABLE, s.id)

            for s in sel.solvables():
                for dep in s.lookup_deparray(solv.SOLVABLE_SUPPLEMENTS):
                    for d in dep.str().split(' '):
                        if d.startswith('namespace:modalias') or d.startswith('namespace:filesystem'):
                            tocheck.add(s.name)

        all_grouped = set()
        for g in self.groups.values():
            if g.solved:
                for arch in g.solved_packages.keys():
                    if g.solved_packages[arch]:
                        all_grouped.update(g.solved_packages[arch])

        for p in tocheck - all_grouped:
            logger.warn('package %s has supplements but is not grouped', p)

    def _prepare_pool(self, arch):
        """Return a solv.Pool for arch backed by the cached repo snapshot.

        Raises Exception if the .solv file is missing (run 'update' first).
        """
        pool = solv.Pool()
        pool.setarch(arch)

        # XXX
        repo = pool.add_repo(FACTORY)
        r = repo.add_solv(os.path.join(CACHEDIR, 'repo-{}-standard-{}.solv'.format(FACTORY, arch)))
        if not r:
            raise Exception("failed to add repo. Need to run update first?")

        pool.addfileprovides()
        pool.createwhatprovides()

        return pool

    def _collect_devel_packages(self):
        """Create an 'all-devel-pkgs' pseudo group holding every -devel
        package whose source package was pulled in by some solved group."""
        srcpkgs = set()
        for g in self.groups.values():
            if g.srcpkgs:
                srcpkgs.update(g.srcpkgs)

        develpkgs = dict()
        for arch in ARCHITECTURES:
            pool = self._prepare_pool(arch)
            sel = pool.Selection()
            for s in pool.solvables_iter():
                if s.name.endswith('-devel'):
                    # don't ask me why, but that's how it seems to work
                    if s.lookup_void(solv.SOLVABLE_SOURCENAME):
                        src = s.name
                    else:
                        src = s.lookup_str(solv.SOLVABLE_SOURCENAME)

                    if src in srcpkgs:
                        develpkgs.setdefault(arch, set()).add(s.name)

        common = None
        # compute common packages across all architectures
        for arch in develpkgs.keys():
            if common is None:
                common = set(develpkgs[arch])
                continue
            common &= develpkgs[arch]

        # reduce arch specific set by common ones
        for arch in develpkgs.keys():
            develpkgs[arch] -= common

        develpkgs['*'] = common

        g = Group('all-devel-pkgs', self)
        # XXX: would need to add to packages instead, then solve and
        # subtract all other groups
        g.solved_packages = develpkgs
        g.solved = True

    def _collect_unsorted_packages(self):
        """Create an 'unsorted' pseudo group with every non-debug package
        that no solved group claimed."""

        packages = dict()
        for arch in ARCHITECTURES:
            pool = self._prepare_pool(arch)
            sel = pool.Selection()
            p = set([s.name for s in
                     pool.solvables_iter() if not
                     (s.name.endswith('-debuginfo') or
                      s.name.endswith('-debugsource'))])

            for g in self.groups.values():
                if g.solved:
                    for a in ('*', arch):
                        if a in g.solved_packages:
                            p -= g.solved_packages[a]
            packages[arch] = p

        common = None
        # compute common packages across all architectures
        for arch in packages.keys():
            if common is None:
                common = set(packages[arch])
                continue
            common &= packages[arch]

        # reduce arch specific set by common ones
        for arch in packages.keys():
            packages[arch] -= common

        packages['*'] = common

        g = Group('unsorted', self)
        g.solved_packages = packages
        g.solved = True
|
|
|
|
class CommandLineInterface(ToolBase.CommandLineInterface):
    """cmdln-style command line front end for PkgListGen."""

    def __init__(self, *args, **kwargs):
        # BUGFIX: forward the argument lists properly -- previously the
        # tuple and the dict were passed as two positional arguments
        ToolBase.CommandLineInterface.__init__(self, *args, **kwargs)

    def get_optparser(self):
        parser = ToolBase.CommandLineInterface.get_optparser(self)
        parser.add_option('-p', '--project', dest='project', metavar='PROJECT',
                        help='project to process (default: %s)' % FACTORY,
                        default = FACTORY)
        parser.add_option('-i', '--input-dir', dest='input_dir', metavar='DIR',
                        help='input directory', default = '.')
        parser.add_option('-o', '--output-dir', dest='output_dir', metavar='DIR',
                        help='output directory', default = '.')
        return parser

    def setup_tool(self):
        tool = PkgListGen(self.options.project)
        tool.input_dir = self.options.input_dir
        tool.output_dir = self.options.output_dir
        return tool

    def do_list(self, subcmd, opts):
        """${cmd_name}: list all groups

        ${cmd_usage}
        ${cmd_option_list}
        """

        self.tool.load_all_groups()

        for name in sorted(self.tool.groups.keys()):
            print(name)

    # to be called only once to bootstrap
    def do_dump_supportstatus(self, subcmd, opts):
        """${cmd_name}: dump supportstatus of input files

        ${cmd_usage}
        ${cmd_option_list}
        """

        self.tool.load_all_groups()
        self.tool._dump_supportstatus()

    def do_list_products(self, subcmd, opts):
        """${cmd_name}: list all products

        ${cmd_usage}
        ${cmd_option_list}
        """

        self.tool.list_products()

    def do_update(self, subcmd, opts):
        """${cmd_name}: mirror the repositories and build .solv caches

        ${cmd_usage}
        ${cmd_option_list}
        """

        bs_mirrorfull = os.path.join(os.path.dirname(__file__), 'bs_mirrorfull')
        repo = 'standard'
        project = FACTORY
        for arch in ARCHITECTURES:
            d = os.path.join(CACHEDIR, 'repo-{}-{}-{}'.format(project, repo, arch))
            logger.debug('updating %s', d)
            subprocess.call([bs_mirrorfull, '{}/build/{}/{}/{}'.format(APIURL, project, repo, arch), d])
            files = [ os.path.join(d, f) for f in os.listdir(d) if f.endswith('.rpm') ]
            # with-block so the .solv file is closed even if rpms2solv fails
            with open(d + '.solv', 'w') as fh:
                p = subprocess.Popen(['rpms2solv', '-m', '-', '-0'], stdin = subprocess.PIPE, stdout = fh)
                p.communicate('\0'.join(files))
                p.wait()

    def do_solve(self, subcmd, opts):
        """${cmd_name}: Solve groups

        ${cmd_usage}
        ${cmd_option_list}
        """

        self.tool.load_all_groups()

        # attribute bag so groups can be referenced as g.<name> below
        class G(object):
            pass

        g = G()

        for name in self.tool.groups.keys():
            # FIXME: tolower, replace dashes?
            setattr(g, name, self.tool.groups[name])

        g.sle_minimal.solve()

        # g.release_packages_sles.solve()
        # g.release_packages_leanos.solve(base = g.sle_minimal)

        g.sle_base.solve(base = g.sle_minimal)

        g.x11_base.solve(base = g.sle_base)
        g.x11_extended.solve(base = g.x11_base)
        g.x11_wayland.solve(base = g.x11_base)

        g.desktop_icewm.solve(base = g.x11_extended)

        g.fonts.solve(base = g.sle_minimal)

        g.fonts_initrd.solve(base = g.fonts)

        g.bootloader.solve(base = g.sle_base)

        g.python.solve(base = g.sle_base)

        g.php7.solve(base = g.sle_base)

        g.sle_databases.solve(base = g.sle_base)

        g.sle_webserver.solve(base = g.sle_base)

        g.admin_tools.solve(base = g.sle_base)

        g.ima_applications.solve(base = g.sle_base)

        g.sle_devtools.solve(base = g.sle_base)

        g.gnome_minimal.solve(base = (g.x11_extended, g.php7))

        g.sle_misc_applications.solve(base = g.gnome_minimal)
        g.sle_misc_applications2.solve(base = g.sle_misc_applications, without = "targetcli")

        g.java_base.solve(base = g.gnome_minimal)
        g.java.solve(base = g.java_base)
        g.java_ibm.solve(base = g.java_base)

        g.documentation_minimal.solve(base = g.gnome_minimal)
        g.documentation_sles_basic.solve(base = g.documentation_minimal)

        g.sled.solve(base = g.gnome_minimal, without = 'sles-release')
        g.release_packages_sled.solve(base = g.sled, without = 'sles-release')

        # NetworkManager unresolvable
        # g.gnome_extended.solve(base = g.gnome_minimal)

        g.qt_standard.solve(base = g.x11_extended)
        g.qt_extended.solve(base = g.qt_standard)

        # sle_base.dump()

        self.tool._collect_devel_packages()
        self.tool._collect_unsorted_packages()
        self.tool._write_all_groups()
|
|
|
|
if __name__ == "__main__":
    # build the CLI and propagate its exit status to the shell
    sys.exit(CommandLineInterface().main())

# vim: sw=4 et
|