#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2017 SUSE LLC
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

from lxml import etree as ET
from collections import namedtuple
import sys
import cmdln
import logging
import urllib2
import osc.core
import glob
import solv
from pprint import pprint, pformat
import os
import subprocess

import ToolBase

logger = logging.getLogger()

FACTORY = "SUSE:SLE-15:GA"
ARCHITECTURES = ('x86_64', 'ppc64le', 's390x')
APIURL = 'https://api.suse.de/public/'


class Group(object):

    def __init__(self, name, pkglist):
        self.name = name
        self.pkglist = pkglist
        self.conditional = None
        self.packages = dict()
        self.solved_packages = None
        self.solved = False
        self.base = None

    def get_solved_packages_recursive(self, arch):
        if not self.solved:
            raise Exception('group {} not solved'.format(self.name))

        solved = self.solved_packages['*'] | self.solved_packages[arch]
        logger.debug("{}.{} have {} packages".format(self.name, arch, len(solved)))
        if self.base:
            for b in self.base:
                solved |= b.get_solved_packages_recursive(arch)

        return solved

    def solve(self, base = None):
        """ base: a single Group, a list of base groups or None """

        if self.solved:
            return

        if isinstance(base, Group):
            base = [ base ]
        if not (base is None or isinstance(base, list) or isinstance(base, tuple)):
            raise Exception("base must be a list but is {}".format(type(base)))

        # solve the package list once per architecture against the matching
        # repo-<project>-standard-<arch>.solv file (created by the "update"
        # command below)
        solved = dict()
        for arch in ARCHITECTURES:
            pool = solv.Pool()
            pool.setarch(arch)

            # XXX
            repo = pool.add_repo('full')
            repo.add_solv('repo-{}-standard-{}.solv'.format(FACTORY, arch))

            pool.addfileprovides()
            pool.createwhatprovides()

            jobs = []
            toinstall = set(self.packages['*'])
            basepackages = set()
            logger.debug("{}: {} common packages".format(self.name, len(toinstall)))
            if arch in self.packages:
                logger.debug("{}: {} {} packages".format(self.name, arch, len(self.packages[arch])))
                toinstall |= self.packages[arch]
            if base:
                for b in base:
                    logger.debug("{} adding packages from {}".format(self.name, b.name))
                    basepackages |= b.get_solved_packages_recursive(arch)
            toinstall |= basepackages
            for n in toinstall:
                sel = pool.select(str(n), solv.Selection.SELECTION_NAME)
                if sel.isempty():
                    logger.error('{}.{}: package {} not found'.format(self.name, arch, n))
                jobs += sel.jobs(solv.Job.SOLVER_INSTALL)

            solver = pool.Solver()
            problems = solver.solve(jobs)
            if problems:
                for problem in problems:
                    # just ignore conflicts here
                    #if not ' conflicts with ' in str(problem):
                    logger.error(problem)
                    raise Exception('unresolvable')
                    #logger.warning(problem)

            trans = solver.transaction()
            if trans.isempty():
                raise Exception('nothing to do')

            solved[arch] = set([ s.name for s in trans.newsolvables() ])
            if basepackages:
                self.base = list(base)
                solved[arch] -= basepackages

        common = None
        # compute common packages across all architectures
        for arch in solved.keys():
            if common is None:
                common = set(solved[arch])
                continue
            common &= solved[arch]

        # reduce arch specific set by common ones
        for arch in solved.keys():
            solved[arch] -= common

        self.solved_packages = solved
        self.solved_packages['*'] = common

        self.solved = True

    def architectures(self):
        return self.solved_packages.keys()

    def toxml(self, arch):

        packages = None
        autodeps = None

        if arch in self.solved_packages:
            autodeps = self.solved_packages[arch]

        if arch in self.packages:
            packages = self.packages[arch]
            if autodeps:
                autodeps -= self.packages[arch]

        if not packages and not autodeps:
            return None

        name = self.name
        if arch != '*':
            name += '.' + arch

        root = ET.Element('group', { 'name' : name})
        c = ET.Comment(' ### AUTOMATICALLY GENERATED, DO NOT EDIT ### ')
        root.append(c)

        if self.base:
            # the comment already lists all base groups, so append it only once
            c = ET.Comment(' based on {} '.format(', '.join([b.name for b in self.base])))
            root.append(c)

        if arch != '*':
            cond = ET.SubElement(root, 'conditional', {'name': 'only_{}'.format(arch)})
        packagelist = ET.SubElement(root, 'packagelist', {'relationship': 'recommends'})

        if packages:
            for name in sorted(packages):
                p = ET.SubElement(packagelist, 'package', {
                    'name' : name,
                    'supportstatus' : self.pkglist.supportstatus(name)
                    })

        if autodeps:
            c = ET.Comment(' automatic dependencies ')
            packagelist.append(c)

            for name in sorted(autodeps):
                p = ET.SubElement(packagelist, 'package', {
                    'name' : name,
                    'supportstatus' : self.pkglist.supportstatus(name)
                    })

        return root

    def dump(self):
        for arch in sorted(self.architectures()):
            x = self.toxml(arch)
            if x is not None:
                print(ET.tostring(x, pretty_print = True))


class PkgListGen(ToolBase.ToolBase):

    def __init__(self, project):
        ToolBase.ToolBase.__init__(self)
        self.project = project
        # package -> supportstatus
        self.packages = dict()
        self.default_support_status = 'l3'
        self.groups = dict()
        self._supportstatus = None

    def _dump_supportstatus(self):
        for name in self.packages.keys():
            for status in self.packages[name]:
                if status == self.default_support_status:
                    continue
                for group in self.packages[name][status]:
                    print group, name, status
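
    # Note: the expected format of supportstatus.txt is an assumption derived
    # from _dump_supportstatus() above and the parsing in _load_supportstatus()
    # below -- one space separated entry per line:
    #
    #   <group> <package> <supportstatus>
    #
    # e.g. (hypothetical values): sle_base bash l2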

    def _load_supportstatus(self):
        # XXX
        with open('supportstatus.txt', 'r') as fh:
            self._supportstatus = dict()
            for l in fh.readlines():
                # strip the trailing newline so the status value stays clean
                group, pkg, status = l.strip().split(' ')
                self._supportstatus[pkg] = status

    # TODO: make per product
    def supportstatus(self, package):
        if self._supportstatus is None:
            self._load_supportstatus()

        if package in self._supportstatus:
            return self._supportstatus[package]
        else:
            return self.default_support_status

    # XXX: move to group class. just here to check supportstatus
    def _parse_group(self, root):
        groupname = root.get('name')
        group = Group(groupname, self)
        for node in root.findall(".//package"):
            name = node.get('name')
            arch = node.get('arch', '*')
            status = node.get('supportstatus') or ''
            logger.debug('group %s, package %s, status %s', groupname, name, status)
            self.packages.setdefault(name, dict())
            self.packages[name].setdefault(status, set()).add(groupname)
            if len(self.packages[name]) > 1:
                logger.error("multiple support states for {}: {}".format(name, pformat(self.packages[name])))
            group.packages.setdefault(arch, set()).add(name)
        return group

    def _load_group_file(self, fn):
        with open(fn, 'r') as fh:
            logger.debug("reading %s", fn)
            root = ET.parse(fh).getroot()
            g = self._parse_group(root)
            self.groups[g.name] = g

    def _load_groups_file(self, fn):
        with open(fn, 'r') as fh:
            logger.debug("reading %s", fn)
            xml = '<groups>' + ''.join(fh.readlines()) + '</groups>'
            root = ET.fromstring(xml)
            for groupnode in root.findall("./group"):
                g = self._parse_group(groupnode)
                self.groups[g.name] = g

    def load_all_groups(self):
        for fn in glob.glob('*.group.in'):
            self._load_group_file(fn)
        for fn in glob.glob('*.groups.in'):
            self._load_groups_file(fn)

    def _write_all_groups(self):
        for name in self.groups:
            group = self.groups[name]
            if not group.solved:
                logger.error('{} not solved'.format(name))
                continue
            for arch in sorted(group.architectures()):
                if arch != '*':
                    fn = '{}.{}.group'.format(name, arch)
                else:
                    fn = '{}.group'.format(name)
                x = group.toxml(arch)
                if x is None:
                    if os.path.exists(fn):
                        os.unlink(fn)
                else:
                    with open(fn, 'w') as fh:
                        fh.write(ET.tostring(x, pretty_print = True))

    def _parse_product(self, root):
        print(root.find('.//products/product/name').text)
        for mnode in root.findall(".//mediasets/media"):
            name = mnode.get('name')
            print(' {}'.format(name))
            for node in mnode.findall(".//use"):
                print(' {}'.format(node.get('group')))

    def list_products(self):
        for fn in glob.glob('*.product'):
            with open(fn, 'r') as fh:
                logger.debug("reading %s", fn)
                root = ET.parse(fh).getroot()
                self._parse_product(root)

    def solve_group(self, name):
        self.load_all_groups()
        group = self.groups[name]
        group.solve()
        return group
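

# A minimal usage sketch of the classes above (illustration only; the
# supported entry points are the cmdln commands in CommandLineInterface
# below). It assumes the *.group.in / *.groups.in files and the repo-*.solv
# files produced by the "update" command are present in the current
# directory; the group names are the ones used in do_solve():
#
#   tool = PkgListGen(FACTORY)
#   tool.load_all_groups()
#   minimal = tool.groups['sle_minimal']
#   base = tool.groups['sle_base']
#   minimal.solve()
#   base.solve(base = minimal)
#   base.dump()          # print the generated XML for each architecture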


class CommandLineInterface(ToolBase.CommandLineInterface):

    def __init__(self, *args, **kwargs):
        ToolBase.CommandLineInterface.__init__(self, *args, **kwargs)

    def get_optparser(self):
        parser = ToolBase.CommandLineInterface.get_optparser(self)
        parser.add_option('-p', '--project', dest='project', metavar='PROJECT',
                          help='project to process (default: %s)' % FACTORY,
                          default = FACTORY)
        return parser

    def setup_tool(self):
        tool = PkgListGen(self.options.project)
        return tool

    def do_list(self, subcmd, opts):
        """${cmd_name}: list all groups

        ${cmd_usage}
        ${cmd_option_list}
        """

        self.tool.load_all_groups()

        for name in sorted(self.tool.groups.keys()):
            print name

    # to be called only once to bootstrap
    def do_dump_supportstatus(self, subcmd, opts):
        """${cmd_name}: dump supportstatus of input files

        ${cmd_usage}
        ${cmd_option_list}
        """

        self.tool.load_all_groups()
        self.tool._dump_supportstatus()

    def do_list_products(self, subcmd, opts):
        """${cmd_name}: list all products

        ${cmd_usage}
        ${cmd_option_list}
        """

        self.tool.list_products()

    def do_update(self, subcmd, opts):
        """${cmd_name}: mirror the repos and regenerate the local solv files

        ${cmd_usage}
        ${cmd_option_list}
        """

        bs_mirrorfull = os.path.join(os.path.dirname(__file__), 'bs_mirrorfull')
        repo = 'standard'
        project = FACTORY
        for arch in ARCHITECTURES:
            d = 'repo-{}-{}-{}'.format(project, repo, arch)
            logger.debug('updating %s', d)
            subprocess.call([bs_mirrorfull, '{}/build/{}/{}/{}'.format(APIURL, project, repo, arch), d])
            files = [ os.path.join(d, f) for f in os.listdir(d) if f.endswith('.rpm') ]
            fh = open(d + '.solv', 'w')
            p = subprocess.Popen(['rpms2solv', '-m', '-', '-0'], stdin = subprocess.PIPE, stdout = fh)
            p.communicate('\0'.join(files))
            p.wait()
            fh.close()
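
    # Note (derived from do_update above): for each architecture this leaves a
    # directory repo-<project>-<repo>-<arch>/ with the mirrored RPMs and a
    # matching repo-<project>-<repo>-<arch>.solv file, e.g.
    # repo-SUSE:SLE-15:GA-standard-x86_64.solv, which is what Group.solve()
    # loads via repo.add_solv().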

    def do_solve(self, subcmd, opts):
        """${cmd_name}: Solve groups

        ${cmd_usage}
        ${cmd_option_list}
        """

        self.tool.load_all_groups()

        class G(object):
            pass

        g = G()

        for name in self.tool.groups.keys():
            # FIXME: tolower, replace dashes?
            setattr(g, name, self.tool.groups[name])

        g.sle_minimal.solve()
        g.sle_base.solve(base = g.sle_minimal)

        g.x11_base.solve(base = g.sle_base)
        g.x11_extended.solve(base = g.x11_base)

        g.desktop_icewm.solve(base = g.x11_extended)

        g.fonts.solve(base = g.sle_minimal)

        g.fonts_initrd.solve(base = g.fonts)

        g.bootloader.solve(base = g.sle_base)

        g.python.solve(base = g.sle_base)

        # sle_base.dump()

        self.tool._write_all_groups()


if __name__ == "__main__":
    app = CommandLineInterface()

    sys.exit( app.main() )

# vim: sw=4 et