#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2017 SUSE LLC
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

# TODO: implement equivalent of namespace namespace:language(de) @SYSTEM
# TODO: solve all devel packages to include

from __future__ import print_function

import copy
from lxml import etree as ET
from collections import namedtuple
import sys
import cmdln
import logging
import urllib2
from osc.core import checkout_package
from osc.core import http_GET
from osc.core import makeurl
from osc.core import Package
from osc.core import show_results_meta
from osc.core import undelete_package
from osc import conf
from osclib.conf import Config
from osclib.stagingapi import StagingAPI
from osclib.util import project_list_family
from osclib.util import project_list_family_prior
from xdg.BaseDirectory import save_cache_path
import glob
import solv
from pprint import pprint, pformat
import os
import os.path
import subprocess
import re
import yaml
import requests
import urlparse
from StringIO import StringIO
import gzip
import tempfile
import random
import shutil
import string

import ToolBase

# share header cache with repochecker
from osclib.memoize import CACHEDIR

logger = logging.getLogger()

SCRIPT_PATH = os.path.dirname(os.path.realpath(__file__))
ARCHITECTURES = ['x86_64', 'ppc64le', 's390x', 'aarch64']
DEFAULT_REPOS = ['openSUSE:Factory/standard']
PRODUCT_SERVICE = '/usr/lib/obs/service/create_single_product'


class Group(object):

    def __init__(self, name, pkglist):
        self.name = name
        self.safe_name = re.sub(r'\W', '_', name.lower())
        self.pkglist = pkglist
        self.architectures = pkglist.architectures
        self.conditional = None
        self.packages = dict()
        self.locked = set()
        self.solved_packages = None
        self.solved = False
        self.not_found = dict()
        self.unresolvable = dict()
        for a in ARCHITECTURES:
            self.packages[a] = []
            self.unresolvable[a] = dict()

        self.comment = ' ### AUTOMATICALLY GENERATED, DO NOT EDIT ### '
        self.srcpkgs = None
        self.develpkgs = dict()
        self.silents = set()
        self.ignored = set()
        # special feature for SLE. Patterns are marked for expansion
        # of recommended packages, all others aren't. Only works
        # with recommends on actual package names, not virtual
        # provides.
        self.expand_recommended = set()
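        # Illustrative (hypothetical package names) group entry from a groups
        # YAML file, showing the per-package markers parse_yml() understands:
        #
        #   editors:
        #     - vim
        #     - emacs:
        #         - recommended   # expand its recommends into the group
        #     - old-editor:
        #         - locked        # never pull this package in
        #     - mainframe-tool:
        #         - s390x         # restrict the package to one architecture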

        pkglist.groups[self.safe_name] = self

    def _add_to_packages(self, package, arch=None):
        archs = self.architectures
        if arch:
            archs = [arch]

        for a in archs:
            self.packages[a].append([package, self.name])

    def parse_yml(self, packages):
        # a group without packages is a rare exception
        if packages is None:
            return

        for package in packages:
            if not isinstance(package, dict):
                self._add_to_packages(package)
                continue
            name = package.keys()[0]
            for rel in package[name]:
                arch = None
                if rel == 'locked':
                    self.locked.add(name)
                    continue
                elif rel == 'silent':
                    self.silents.add(name)
                elif rel == 'recommended':
                    self.expand_recommended.add(name)
                else:
                    arch = rel

                self._add_to_packages(name, arch)

    def _verify_solved(self):
        if not self.solved:
            raise Exception('group {} not solved'.format(self.name))

    def inherit(self, group):
        for arch in self.architectures:
            self.packages[arch] += group.packages[arch]

        self.locked.update(group.locked)
        self.silents.update(group.silents)
        self.expand_recommended.update(group.expand_recommended)

    # do not repeat packages
    def ignore(self, without):
        for arch in ['*'] + self.architectures:
            s = set(without.solved_packages[arch].keys())
            s |= set(without.solved_packages['*'].keys())
            for p in s:
                self.solved_packages[arch].pop(p, None)
        for p in without.not_found.keys():
            if p not in self.not_found:
                continue
            self.not_found[p] -= without.not_found[p]
            if not self.not_found[p]:
                self.not_found.pop(p)
        for g in without.ignored:
            self.ignore(g)
        self.ignored.add(without)

    def solve(self, ignore_recommended=False, include_suggested=False):
        """Solve the group's package list for every architecture."""

        solved = dict()
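        # solved maps each architecture to {binary package name: reason string};
        # a '*' entry is added at the end for packages common to all architectures.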
        for arch in self.architectures:
            solved[arch] = dict()

        self.srcpkgs = dict()
        self.recommends = dict()
        self.suggested = dict()
        for arch in self.architectures:
            pool = self.pkglist._prepare_pool(arch)
            # pool.set_debuglevel(10)
            suggested = []

            # packages resulting from explicit recommended expansion
            extra = []

            def solve_one_package(n, group):
                jobs = list(self.pkglist.lockjobs[arch])
                sel = pool.select(str(n), solv.Selection.SELECTION_NAME)
                if sel.isempty():
                    logger.debug('{}.{}: package {} not found'.format(self.name, arch, n))
                    self.not_found.setdefault(n, set()).add(arch)
                    return
                else:
                    if n in self.expand_recommended:
                        for s in sel.solvables():
                            for dep in s.lookup_deparray(solv.SOLVABLE_RECOMMENDS):
                                # only add recommends that exist as packages
                                rec = pool.select(dep.str(), solv.Selection.SELECTION_NAME)
                                if not rec.isempty():
                                    extra.append([dep.str(), group + ":recommended:" + n])

                    jobs += sel.jobs(solv.Job.SOLVER_INSTALL)

                locked = self.locked | self.pkglist.unwanted
                for l in locked:
                    sel = pool.select(str(l), solv.Selection.SELECTION_NAME)
                    if sel.isempty():
                        # if we can't find it, it probably is not as important
                        logger.debug('{}.{}: locked package {} not found'.format(self.name, arch, l))
                    else:
                        jobs += sel.jobs(solv.Job.SOLVER_LOCK)

                for s in self.silents:
                    sel = pool.select(str(s), solv.Selection.SELECTION_NAME | solv.Selection.SELECTION_FLAT)
                    if sel.isempty():
                        logger.warn('{}.{}: silent package {} not found'.format(self.name, arch, s))
                    else:
                        jobs += sel.jobs(solv.Job.SOLVER_INSTALL)

                solver = pool.Solver()
                if ignore_recommended:
                    solver.set_flag(solver.SOLVER_FLAG_IGNORE_RECOMMENDED, 1)

                problems = solver.solve(jobs)
                if problems:
                    for problem in problems:
                        msg = 'unresolvable: %s.%s: %s' % (self.name, arch, problem)
                        if self.pkglist.ignore_broken:
                            logger.debug(msg)
                        else:
                            logger.error(msg)
                        self.unresolvable[arch][n] = str(problem)
                    return

                if hasattr(solver, 'get_recommended'):
                    for s in solver.get_recommended():
                        if s.name in locked:
                            continue
                        self.recommends.setdefault(s.name, group + ':' + n)
                    for s in solver.get_suggested():
                        suggested.append([s.name, group + ':suggested:' + n])
                        self.suggested.setdefault(s.name, group + ':' + n)
                else:
                    logger.warn('newer libsolv needed for recommends!')

                trans = solver.transaction()
                if trans.isempty():
                    logger.error('%s.%s: nothing to do', self.name, arch)
                    return

                for s in trans.newsolvables():
                    solved[arch].setdefault(s.name, group + ':' + n)
                    reason, rule = solver.describe_decision(s)
                    if False:  # flip to True to trace solver decisions
                        print(self.name, s.name, reason, rule.info().problemstr())
                    # don't ask me why, but that's how it seems to work
                    if s.lookup_void(solv.SOLVABLE_SOURCENAME):
                        src = s.name
                    else:
                        src = s.lookup_str(solv.SOLVABLE_SOURCENAME)
                    self.srcpkgs[src] = group + ':' + s.name

            for n, group in self.packages[arch]:
                solve_one_package(n, group)

            if include_suggested:
                seen = set()
                while suggested:
                    n, group = suggested.pop()
                    if n in seen:
                        continue
                    seen.add(n)
                    solve_one_package(n, group)

        common = None
        # compute common packages across all architectures
        for arch in self.architectures:
            if common is None:
                common = set(solved[arch].keys())
                continue
            common &= set(solved[arch].keys())

        if common is None:
            common = set()

        # reduce arch specific set by common ones
        solved['*'] = dict()
        for arch in self.architectures:
            for p in common:
                solved['*'][p] = solved[arch].pop(p)

        self.solved_packages = solved
        self.solved = True

    def check_dups(self, modules, overlap):
        if not overlap:
            return
        packages = set(self.solved_packages['*'])
        for arch in self.architectures:
            packages.update(self.solved_packages[arch])
        for m in modules:
            # do not check with ourselves and only once for the rest
            if m.name <= self.name:
                continue
            if self.name in m.conflicts or m.name in self.conflicts:
                continue
            mp = set(m.solved_packages['*'])
            for arch in self.architectures:
                mp.update(m.solved_packages[arch])
            if len(packages & mp):
                overlap.comment += '\n overlapping between ' + self.name + ' and ' + m.name
                for p in sorted(packages & mp):
                    overlap.comment += '\n  - ' + p
                    overlap._add_to_packages(p)

    def collect_devel_packages(self):
        for arch in self.architectures:
            pool = self.pkglist._prepare_pool(arch)
            sel = pool.Selection()
            for s in pool.solvables_iter():
                if s.name.endswith('-devel'):
                    # don't ask me why, but that's how it seems to work
                    if s.lookup_void(solv.SOLVABLE_SOURCENAME):
                        src = s.name
                    else:
                        src = s.lookup_str(solv.SOLVABLE_SOURCENAME)

                    if src in self.srcpkgs.keys():
                        self.develpkgs[s.name] = self.srcpkgs[src]

    def _filter_already_selected(self, modules, pkgdict):
        # drop packages that are already part of one of the modules
        for p in pkgdict.keys():
            already_present = False
            for m in modules:
                for arch in ['*'] + self.architectures:
                    already_present = already_present or (p in m.solved_packages[arch])
            if already_present:
                del pkgdict[p]

    def filter_already_selected(self, modules):
        self._filter_already_selected(modules, self.recommends)
        self._filter_already_selected(modules, self.suggested)

    def toxml(self, arch, ignore_broken=False, comment=None):
        packages = self.solved_packages[arch]

        name = self.name
        if arch != '*':
            name += '.' + arch

        root = ET.Element('group', {'name': name})
        if comment:
            c = ET.Comment(comment)
            root.append(c)

        if arch != '*':
            cond = ET.SubElement(root, 'conditional', {
                'name': 'only_{}'.format(arch)})
        packagelist = ET.SubElement(
            root, 'packagelist', {'relationship': 'recommends'})
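        # The emitted XML looks roughly like (illustrative):
        #   <group name="NAME[.ARCH]">
        #     <conditional name="only_ARCH"/>   (arch-specific groups only)
        #     <packagelist relationship="recommends">
        #       <package name="..." supportstatus="..."/> <!-- reason: ... -->
        #     </packagelist>
        #   </group>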

        missing = dict()
        if arch == '*':
            missing = self.not_found
        unresolvable = self.unresolvable.get(arch, dict())
        for name in sorted(packages.keys() + missing.keys() + unresolvable.keys()):
            if name in self.silents:
                continue
            if name in missing:
                msg = ' {} not found on {}'.format(name, ','.join(sorted(missing[name])))
                if ignore_broken:
                    c = ET.Comment(msg)
                    packagelist.append(c)
                    continue
                name = msg
            if name in unresolvable:
                msg = ' {} uninstallable: {}'.format(name, unresolvable[name])
                if ignore_broken:
                    c = ET.Comment(msg)
                    packagelist.append(c)
                    continue
                else:
                    logger.error(msg)
                    name = msg
            status = self.pkglist.supportstatus(name)
            attrs = {'name': name}
            if status is not None:
                attrs['supportstatus'] = status
            p = ET.SubElement(packagelist, 'package', attrs)
            if name in packages:
                c = ET.Comment(' reason: {} '.format(packages[name]))
                packagelist.append(c)

        return root

    def dump(self):
        pprint({'name': self.name, 'missing': self.not_found, 'packages': self.packages,
                'solved': self.solved_packages, 'silents': self.silents})
        return
        archs = ['*'] + self.architectures
        for arch in archs:
            x = self.toxml(arch)
            print(ET.tostring(x, pretty_print=True))


class PkgListGen(ToolBase.ToolBase):

    def __init__(self):
        ToolBase.ToolBase.__init__(self)
        self.repos = DEFAULT_REPOS
        # package -> supportstatus
        self.packages = dict()
        self.default_support_status = 'l3'
        self.groups = dict()
        self._supportstatus = None
        self.input_dir = '.'
        self.output_dir = '.'
        self.lockjobs = dict()
        self.ignore_broken = False
        self.ignore_recommended = False
        self.include_suggested = False
        self.unwanted = set()
        self.output = None
        self.locales = set()

    def _dump_supportstatus(self):
        for name in self.packages.keys():
            for status in self.packages[name]:
                if status == self.default_support_status:
                    continue
                for group in self.packages[name][status]:
                    print(name, status)

    def _load_supportstatus(self):
        # XXX
        fn = os.path.join(self.input_dir, 'supportstatus.txt')
        self._supportstatus = dict()
        if os.path.exists(fn):
            with open(fn, 'r') as fh:
                for l in fh:
                    # pkg, status
                    a = l.rstrip().split(' ')
                    if len(a) > 1:
                        self._supportstatus[a[0]] = a[1]

    # TODO: make per product
    def supportstatus(self, package):
        if self._supportstatus is None:
            self._load_supportstatus()

        if package in self._supportstatus:
            return self._supportstatus[package]
        else:
            return self.default_support_status

    def _load_group_file(self, fn):
        output = None
        unwanted = None
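        # Two pseudo group names are special in the YAML input: 'OUTPUT' lists
        # the modules to generate (consumed by do_solve) and 'UNWANTED' lists
        # packages that are locked everywhere; everything else becomes a Group.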
        with open(fn, 'r') as fh:
            logger.debug("reading %s", fn)
            for groupname, group in yaml.safe_load(fh).items():
                if groupname == 'OUTPUT':
                    output = group
                    continue
                if groupname == 'UNWANTED':
                    unwanted = set(group)
                    continue
                g = Group(groupname, self)
                g.parse_yml(group)
        return output, unwanted

    def load_all_groups(self):
        for fn in glob.glob(os.path.join(self.input_dir, 'group*.yml')):
            o, u = self._load_group_file(fn)
            if o:
                if self.output is not None:
                    raise Exception('OUTPUT defined multiple times')
                self.output = o
            if u:
                self.unwanted |= u

    def _write_all_groups(self):
        self._check_supplements()
        archs = ['*'] + self.architectures
        for name in self.groups:
            group = self.groups[name]
            if not group.solved:
                continue
            fn = '{}.group'.format(group.name)
            with open(os.path.join(self.output_dir, fn), 'w') as fh:
                comment = group.comment
                for arch in archs:
                    x = group.toxml(arch, self.ignore_broken, comment)
                    # only comment first time
                    comment = None
                    x = ET.tostring(x, pretty_print=True)
                    x = re.sub(r'\s*<!-- reason:', ' <!-- reason:', x)
                    # fh.write(ET.tostring(x, pretty_print = True, doctype = '<?xml version="1.0" encoding="UTF-8"?>'))
                    fh.write(x)

    def _parse_product(self, root):
        print(root.find('.//products/product/name').text)
        for mnode in root.findall(".//mediasets/media"):
            name = mnode.get('name')
            print('  {}'.format(name))
            for node in mnode.findall(".//use"):
                print('    {}'.format(node.get('group')))

    def list_products(self):
        for fn in glob.glob('*.product'):
            with open(os.path.join(self.input_dir, fn), 'r') as fh:
                logger.debug("reading %s", fn)
                root = ET.parse(fh).getroot()
                self._parse_product(root)

    def solve_module(self, groupname, includes, excludes):
        g = self.groups[groupname]
        for i in includes:
            g.inherit(self.groups[i])
        g.solve(self.ignore_recommended, self.include_suggested)
        for e in excludes:
            g.ignore(self.groups[e])

    def _check_supplements(self):
        tocheck = set()
        for arch in self.architectures:
            pool = self._prepare_pool(arch)
            sel = pool.Selection()
            for s in pool.solvables_iter():
                sel.add_raw(solv.Job.SOLVER_SOLVABLE, s.id)

            for s in sel.solvables():
                for dep in s.lookup_deparray(solv.SOLVABLE_SUPPLEMENTS):
                    for d in dep.str().split(' '):
                        if d.startswith('namespace:modalias') or d.startswith('namespace:filesystem'):
                            tocheck.add(s.name)

        all_grouped = set()
        for g in self.groups.values():
            if g.solved:
                for arch in g.solved_packages.keys():
                    if g.solved_packages[arch]:
                        all_grouped.update(g.solved_packages[arch])

        for p in tocheck - all_grouped:
            logger.warn('package %s has supplements but is not grouped', p)

    def _prepare_pool(self, arch):
        pool = solv.Pool()
        pool.setarch(arch)

        self.lockjobs[arch] = []
        solvables = set()
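        # The namespace callback below tells libsolv which conditional
        # dependencies apply: modalias and filesystem() provides are always
        # honoured, language() provides only for the configured self.locales.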

        def cb(name, evr):
            ret = 0
            if name == solv.NAMESPACE_MODALIAS:
                ret = 1
            elif name == solv.NAMESPACE_FILESYSTEM:
                ret = 1
            elif name == solv.NAMESPACE_LANGUAGE:
                if pool.id2str(evr) in self.locales:
                    ret = 1
            else:
                logger.warning('unhandled "{} {}"'.format(pool.id2str(name), pool.id2str(evr)))

            return ret

        if hasattr(pool, 'set_namespacecallback'):
            pool.set_namespacecallback(cb)
        else:
            logger.debug('libsolv missing namespace callback')

        for prp in self.repos:
            project, reponame = prp.split('/')
            repo = pool.add_repo(project)
            s = os.path.join(CACHEDIR, 'repo-{}-{}-{}.solv'.format(project, reponame, arch))
            r = repo.add_solv(s)
            if not r:
                raise Exception("failed to add repo {}/{}/{}. Need to run update first?".format(project, reponame, arch))
            for solvable in repo.solvables_iter():
                if solvable.name in solvables:
                    self.lockjobs[arch].append(pool.Job(solv.Job.SOLVER_SOLVABLE | solv.Job.SOLVER_LOCK, solvable.id))
                solvables.add(solvable.name)

        pool.addfileprovides()
        pool.createwhatprovides()

        return pool

    # parse file and merge all groups
    def _parse_unneeded(self, filename):
        filename = os.path.join(self.input_dir, filename)
        if not os.path.isfile(filename):
            return set()
        fh = open(filename, 'r')
        logger.debug("reading %s", filename)
        result = set()
        for groupname, group in yaml.safe_load(fh).items():
            result.update(group)
        return result

    def _collect_unsorted_packages(self, modules):
        unneeded_regexps = [re.compile(r)
                            for r in self._parse_unneeded('unneeded.yml')]

        packages = dict()
        for arch in self.architectures:
            pool = self._prepare_pool(arch)
            sel = pool.Selection()
            archpacks = [s.name for s in pool.solvables_iter()]
            for r in unneeded_regexps:
                archpacks = [p for p in archpacks if not r.match(p)]

            # convert to set
            archpacks = set(archpacks) - self.unwanted
            for g in modules:
                for a in ('*', arch):
                    archpacks -= set(g.solved_packages[a].keys())
            for package in archpacks:
                packages.setdefault(package, []).append(arch)

        with open(os.path.join(self.output_dir, 'unsorted.yml'), 'w') as fh:
            fh.write("unsorted:\n")
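            # The resulting unsorted.yml looks roughly like (hypothetical names):
            #   unsorted:
            #     - some-tool
            #     - other-tool: [s390x,x86_64]  # recommended by base:some-pkg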
            for p in sorted(packages.keys()):
                fh.write("  - ")
                fh.write(p)
                if len(packages[p]) != len(self.architectures):
                    fh.write(": [")
                    fh.write(','.join(sorted(packages[p])))
                    fh.write("]")
                reason = self._find_reason(p, modules)
                if reason:
                    fh.write(' # ' + reason)
                fh.write(" \n")

    # give a hint if the package is related to a group
    def _find_reason(self, package, modules):
        # go through the modules multiple times to find the "best"
        for g in modules:
            if package in g.recommends:
                return 'recommended by ' + g.recommends[package]
        for g in modules:
            if package in g.suggested:
                return 'suggested by ' + g.suggested[package]
        for g in modules:
            if package in g.develpkgs:
                return 'devel package of ' + g.develpkgs[package]
        return None


class CommandLineInterface(ToolBase.CommandLineInterface):
    SCOPES = ['all', 'target', 'rings', 'staging', 'ports']

    def __init__(self, *args, **kwargs):
        ToolBase.CommandLineInterface.__init__(self, *args, **kwargs)

    def get_optparser(self):
        parser = ToolBase.CommandLineInterface.get_optparser(self)
        parser.add_option('-r', '--repositories', dest='repos', metavar='REPOS', action='append',
                          help='repositories to process (%s)' % ', '.join(DEFAULT_REPOS))
        parser.add_option('-i', '--input-dir', dest='input_dir', metavar='DIR',
                          help='input directory', default='.')
        parser.add_option('-o', '--output-dir', dest='output_dir', metavar='DIR',
                          help='output directory', default='.')
        parser.add_option('-a', '--architecture', dest='architectures', metavar='ARCH',
                          help='architecture', action='append')
        parser.add_option('--default-support-status', dest='default_support_status', metavar='STATUS',
                          help='default support status', default=None)
        return parser

    def setup_tool(self):
        tool = PkgListGen()
        tool.input_dir = self.options.input_dir
        tool.output_dir = self.options.output_dir
        tool.default_support_status = self.options.default_support_status
        if self.options.architectures:
            tool.architectures = self.options.architectures
        else:
            tool.architectures = ARCHITECTURES
        if self.options.repos:
            repos = []
            for r in self.options.repos:
                # handle commas as well, easier for shell scripts for now
                if ',' in r:
                    repos += r.split(',')
                else:
                    repos.append(r)
            tool.repos = repos
        return tool

    def do_list(self, subcmd, opts):
        """${cmd_name}: list all groups

        ${cmd_usage}
        ${cmd_option_list}
        """

        self.tool.load_all_groups()

        for name in sorted(self.tool.groups.keys()):
            print(name)

    # to be called only once to bootstrap
    def do_dump_supportstatus(self, subcmd, opts):
        """${cmd_name}: dump supportstatus of input files

        ${cmd_usage}
        ${cmd_option_list}
        """

        self.tool.load_all_groups()
        self.tool._dump_supportstatus()

    def do_list_products(self, subcmd, opts):
        """${cmd_name}: list all products

        ${cmd_usage}
        ${cmd_option_list}
        """

        self.tool.list_products()

    def do_update(self, subcmd, opts):
        """${cmd_name}: mirror the repositories and update the cached solv files

        ${cmd_usage}
        ${cmd_option_list}
        """

        # only there to parse the repos
        bs_mirrorfull = os.path.join(SCRIPT_PATH, 'bs_mirrorfull')
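        # bs_mirrorfull (shipped next to this script) downloads the built RPMs
        # for each project/repo/arch from the OBS public API into CACHEDIR;
        # rpms2solv then turns the downloaded RPMs into a .solv file below.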
        global_update = False
        for prp in self.tool.repos:
            project, repo = prp.split('/')
            for arch in self.tool.architectures:
                # TODO: refactor to common function with repo_checker.py
                d = os.path.join(CACHEDIR, project, repo, arch)
                if not os.path.exists(d):
                    os.makedirs(d)

                logger.debug('updating %s', d)
                args = [bs_mirrorfull]
                args.append('--nodebug')
                args.append('{}/public/build/{}/{}/{}'.format(self.tool.apiurl, project, repo, arch))
                args.append(d)
                p = subprocess.Popen(args, stdout=subprocess.PIPE)
                repo_update = False
                for line in p.stdout:
                    logger.info(line.rstrip())
                    global_update = True
                    repo_update = True

                solv_file = os.path.join(CACHEDIR, 'repo-{}-{}-{}.solv'.format(project, repo, arch))
                if os.path.exists(solv_file) and not repo_update:
                    continue
                files = [os.path.join(d, f)
                         for f in os.listdir(d) if f.endswith('.rpm')]
                fh = open(solv_file, 'w')
                p = subprocess.Popen(
                    ['rpms2solv', '-m', '-', '-0'], stdin=subprocess.PIPE, stdout=fh)
                p.communicate('\0'.join(files))
                p.wait()
                fh.close()
        return global_update

    def update_merge(self, nonfree):
        """Merge free and nonfree solv files or copy free to merged"""
        for prp in self.tool.repos:
            project, repo = prp.split('/')
            for arch in self.tool.architectures:
                solv_file = os.path.join(
                    CACHEDIR, 'repo-{}-{}-{}.solv'.format(project, repo, arch))
                solv_file_merged = os.path.join(
                    CACHEDIR, 'repo-{}-{}-{}.merged.solv'.format(project, repo, arch))

                if not nonfree:
                    shutil.copyfile(solv_file, solv_file_merged)
                    continue

                solv_file_nonfree = os.path.join(
                    CACHEDIR, 'repo-{}-{}-{}.solv'.format(nonfree, repo, arch))
                self.solv_merge(solv_file_merged, solv_file, solv_file_nonfree)

    def solv_merge(self, solv_merged, *solvs):
        solvs = list(solvs)  # From tuple.
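        # mergesolv (a libsolv tool) concatenates the given solv files into one;
        # here it combines the free and nonfree repos into a *.merged.solv file.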

        if os.path.exists(solv_merged):
            modified = map(os.path.getmtime, [solv_merged] + solvs)
            if max(modified) <= modified[0]:
                # The inputs were modified before or at the same time as the merged file.
                logger.debug('merge skipped for {}'.format(solv_merged))
                return

        with open(solv_merged, 'w') as handle:
            p = subprocess.Popen(['mergesolv'] + solvs, stdout=handle)
            p.communicate()

        if p.returncode:
            raise Exception('failed to create merged solv file')

    def do_create_droplist(self, subcmd, opts, *oldsolv):
        """${cmd_name}: generate list of obsolete packages

        The globally specified repositories are taken as the current
        package set. All solv files specified on the command line
        are old versions of those repos.

        The command outputs all package names that are no longer
        contained in or provided by the current repos.

        ${cmd_usage}
        ${cmd_option_list}
        """

        drops = dict()

        for arch in self.tool.architectures:

            for old in oldsolv:

                logger.debug("%s: processing %s", arch, old)

                pool = solv.Pool()
                pool.setarch(arch)

                for prp in self.tool.repos:
                    project, repo = prp.split('/')
                    fn = os.path.join(CACHEDIR, 'repo-{}-{}-{}.solv'.format(project, repo, arch))
                    r = pool.add_repo(prp)
                    r.add_solv(fn)

                sysrepo = pool.add_repo(os.path.basename(old).replace('.merged.solv', ''))
                sysrepo.add_solv(old)

                pool.createwhatprovides()

                for s in sysrepo.solvables:
                    haveit = False
                    for s2 in pool.whatprovides(s.nameid):
                        if s2.repo == sysrepo or s.nameid != s2.nameid:
                            continue
                        haveit = True
                    if haveit:
                        continue
                    nevr = pool.rel2id(s.nameid, s.evrid, solv.REL_EQ)
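                    # Not provided by name; also treat the package as kept if
                    # anything in the current repos obsoletes this exact name = EVR.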
                    for s2 in pool.whatmatchesdep(solv.SOLVABLE_OBSOLETES, nevr):
                        if s2.repo == sysrepo:
                            continue
                        haveit = True
                    if haveit:
                        continue
                    if s.name not in drops:
                        drops[s.name] = sysrepo.name

                # mark it explicitly to avoid having 2 pools while GC is not run
                del pool

        ofh = sys.stdout
        if self.options.output_dir:
            name = os.path.join(self.options.output_dir, 'obsoletepackages.inc')
            ofh = open(name, 'w')

        for reponame in sorted(set(drops.values())):
            print("<!-- %s -->" % reponame, file=ofh)
            for p in sorted(drops):
                if drops[p] != reponame:
                    continue
                print("  <obsoletepackage>%s</obsoletepackage>" % p, file=ofh)

    @cmdln.option('--overwrite', action='store_true', help='overwrite if output file exists')
    def do_dump_solv(self, subcmd, opts, baseurl):
        """${cmd_name}: fetch repomd and dump solv

        Dumps solv from published repository. Use solve to generate from
        pre-published repository.

        If an output directory is specified, a file named according
        to the build is created there. Otherwise the solv file is
        dumped to stdout.

        ${cmd_usage}
        ${cmd_option_list}
        """

        name = None
        ofh = sys.stdout
        if self.options.output_dir:
            build, repo_style = self.dump_solv_build(baseurl)
            name = '{}/{}.solv'.format(self.options.output_dir, build)
            if not opts.overwrite and os.path.exists(name):
                logger.info("%s exists", name)
                return name
            ofh = open(name + '.new', 'w')

        pool = solv.Pool()
        pool.setarch()

        repo = pool.add_repo(''.join(random.choice(string.letters) for _ in range(5)))
        f = tempfile.TemporaryFile()
        path_prefix = 'suse/' if name and repo_style == 'build' else ''
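        # 'build'-style repos (detected via media.1/build) keep their repodata
        # below a suse/ subdirectory, while 'media'-style repos have it at the
        # top level; see dump_solv_build() for how repo_style is determined.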
        url = urlparse.urljoin(baseurl, path_prefix + 'repodata/repomd.xml')
        repomd = requests.get(url)
        ns = {'r': 'http://linux.duke.edu/metadata/repo'}
        root = ET.fromstring(repomd.content)
        location = root.find('.//r:data[@type="primary"]/r:location', ns).get('href')
        f.write(repomd.content)
        os.lseek(f.fileno(), 0, os.SEEK_SET)
        repo.add_repomdxml(f, 0)
        url = urlparse.urljoin(baseurl, path_prefix + location)
        with requests.get(url, stream=True) as primary:
            content = gzip.GzipFile(fileobj=StringIO(primary.content))
            os.lseek(f.fileno(), 0, os.SEEK_SET)
            f.write(content.read())
            os.lseek(f.fileno(), 0, os.SEEK_SET)
            # TODO: verify checksum
            repo.add_rpmmd(f, None, 0)
            repo.create_stubs()
            repo.write(ofh)

        if name is not None:
            os.rename(name + '.new', name)
            return name

    def dump_solv_build(self, baseurl):
        """Determine repo format and build string from remote repository."""
        name = None
        url = urlparse.urljoin(baseurl, 'media.1/media')
        with requests.get(url) as media:
            for i, line in enumerate(media.iter_lines()):
                if i != 1:
                    continue
                name = line

        if name is not None and '-Build' in name:
            return name, 'media'

        url = urlparse.urljoin(baseurl, 'media.1/build')
        with requests.get(url) as build:
            name = build.content.strip()

        if name is not None and '-Build' in name:
            return name, 'build'

        raise Exception('media.1/{media,build} includes no build number')

    @cmdln.option('--ignore-unresolvable', action='store_true', help='ignore unresolvable and missing packages')
    @cmdln.option('--ignore-recommended', action='store_true', help='do not include recommended packages automatically')
    @cmdln.option('--include-suggested', action='store_true', help='include suggested packages also')
    @cmdln.option('--locale', action='append', help='locales to include')
    @cmdln.option('--locales-from', metavar='FILE', help='get supported locales from product file FILE')
    def do_solve(self, subcmd, opts):
        """${cmd_name}: Solve groups

        Generates solv from pre-published repository contained in local cache.
        Use dump_solv to extract solv from published repository.

        ${cmd_usage}
        ${cmd_option_list}
        """

        self.tool.load_all_groups()
        if not self.tool.output:
            logger.error('OUTPUT not defined')
            return

        if opts.ignore_unresolvable:
            self.tool.ignore_broken = True
        if opts.ignore_recommended:
            self.tool.ignore_recommended = True
        if opts.include_suggested:
            if opts.ignore_recommended:
                raise cmdln.CmdlnUserError("--ignore-recommended and --include-suggested don't work together")
            self.tool.include_suggested = True
        if opts.locale:
            for l in opts.locale:
                self.tool.locales |= set(l.split(','))
        if opts.locales_from:
            with open(os.path.join(self.tool.input_dir, opts.locales_from), 'r') as fh:
                root = ET.parse(fh).getroot()
                self.tool.locales |= set([lang.text for lang in root.findall(".//linguas/language")])

        modules = []
        # the yml parser makes an array out of everything, so
        # we loop a bit more than what we support
        for group in self.tool.output:
            groupname = group.keys()[0]
            settings = group[groupname]
            includes = settings.get('includes', [])
            excludes = settings.get('excludes', [])
            self.tool.solve_module(groupname, includes, excludes)
            g = self.tool.groups[groupname]
            g.conflicts = settings.get('conflicts', [])
            modules.append(g)

        # not defined for openSUSE
        overlap = self.tool.groups.get('overlap')
        for module in modules:
            module.check_dups(modules, overlap)
            module.collect_devel_packages()
            module.filter_already_selected(modules)

        if overlap:
            ignores = [x.name for x in overlap.ignored]
            self.tool.solve_module(overlap.name, [], ignores)
            overlapped = set(overlap.solved_packages['*'])
            for arch in overlap.architectures:
                overlapped |= set(overlap.solved_packages[arch])
            for module in modules:
                if module.name == 'overlap' or module in overlap.ignored:
                    continue
                for arch in ['*'] + module.architectures:
                    for p in overlapped:
                        module.solved_packages[arch].pop(p, None)

        self.tool._collect_unsorted_packages(modules)
        self.tool._write_all_groups()

    @cmdln.option('-f', '--force', action='store_true', help='continue even if build is in progress')
    @cmdln.option('-p', '--project', help='target project')
    @cmdln.option('-s', '--scope', default='all', help='scope on which to operate ({})'.format(', '.join(SCOPES)))
    def do_update_and_solve(self, subcmd, opts):
        """${cmd_name}: update and solve for given scope

        ${cmd_usage}
        ${cmd_option_list}
        """

        if not opts.project:
            raise ValueError('project is required')
        if opts.scope not in self.SCOPES:
            raise ValueError('scope must be one of: {}'.format(', '.join(self.SCOPES)))

        if opts.scope == 'all':
            for scope in self.SCOPES[1:]:
                opts.scope = scope
                self.do_update_and_solve(subcmd, copy.deepcopy(opts))
            return

        # Store target project as opts.project will contain subprojects.
        target_project = opts.project

        config = Config(target_project)
        apiurl = conf.config['apiurl']
        api = StagingAPI(apiurl, target_project)
        config.apply_remote(api)

        target_config = conf.config[target_project]
        archs_key = 'pkglistgen-archs' if opts.scope != 'ports' else 'pkglistgen-archs-ports'
        if archs_key in target_config:
            self.options.architectures = target_config.get(archs_key).split(' ')
        main_repo = target_config['main-repo']

        if opts.scope == 'target':
            self.options.repos = ['/'.join([target_project, main_repo])]
            self.update_and_solve_target(apiurl, target_project, target_config, main_repo, opts, drop_list=True)
            return
        elif opts.scope == 'ports':
            # TODO Continue supporting #1297, but should be abstracted.
            main_repo = 'ports'
            opts.project += ':Ports'
            self.options.repos = ['/'.join([opts.project, main_repo])]
            self.update_and_solve_target(apiurl, target_project, target_config, main_repo, opts, drop_list=True)
            return
        elif opts.scope == 'rings':
            opts.project = api.rings[1]
            self.options.repos = [
                '/'.join([api.rings[1], main_repo]),
                '/'.join([api.rings[0], main_repo]),
            ]
            self.update_and_solve_target(apiurl, target_project, target_config, main_repo, opts)

            opts.project = api.rings[2]
            self.options.repos.insert(0, '/'.join([api.rings[2], main_repo]))
            self.update_and_solve_target(apiurl, target_project, target_config, main_repo, opts, skip_release=True)
            return
        elif opts.scope == 'staging':
            letters = api.get_staging_projects_short()
            for letter in letters:
                opts.project = api.prj_from_short(letter)
                self.options.repos = ['/'.join([opts.project, main_repo])]

                if not api.is_staging_bootstrapped(opts.project):
                    self.options.repos.append('/'.join([opts.project, 'bootstrap_copy']))

                # DVD project first since it depends on main.
                if api.item_exists(opts.project + ':DVD'):
                    opts_dvd = copy.deepcopy(opts)
                    opts_dvd.project += ':DVD'
                    self.options.repos.insert(0, '/'.join([opts_dvd.project, main_repo]))
                    self.update_and_solve_target(apiurl, target_project, target_config, main_repo, opts_dvd, skip_release=True)
                    self.options.repos.pop(0)

                self.update_and_solve_target(apiurl, target_project, target_config, main_repo, opts)
            return

    def update_and_solve_target(self, apiurl, target_project, target_config, main_repo, opts,
                                skip_release=False, drop_list=False):
        print('[{}] {}/{}: update and solve'.format(opts.scope, opts.project, main_repo))

        group = target_config.get('pkglistgen-group', '000package-groups')
        product = target_config.get('pkglistgen-product', '000product')
        release = target_config.get('pkglistgen-release', '000release-packages')

        url = makeurl(apiurl, ['source', opts.project])
        packages = ET.parse(http_GET(url)).getroot()
        if packages.find('entry[@name="{}"]'.format(product)) is None:
            if not self.options.dry:
                undelete_package(apiurl, opts.project, product, 'revive')
            # TODO disable build.
            print('{} undeleted, skip dvd until next cycle'.format(product))
            return
        elif not opts.force:
            root = ET.fromstringlist(show_results_meta(apiurl, opts.project, product,
                                                       repository=[main_repo], multibuild=True))
            if len(root.xpath('result[@state="building"]')) or len(root.xpath('result[@state="dirty"]')):
                print('{}/{} build in progress'.format(opts.project, product))
                return

        checkout_list = [group, product]
        if not skip_release:
            checkout_list.append(release)

            if packages.find('entry[@name="{}"]'.format(release)) is None:
                if not self.options.dry:
                    undelete_package(apiurl, opts.project, release, 'revive')
                print('{} undeleted, skip dvd until next cycle'.format(release))
                return

        # Cache dir specific to hostname and project.
        host = urlparse.urlparse(apiurl).hostname
        cache_dir = save_cache_path('opensuse-packagelists', host, opts.project)

        if os.path.exists(cache_dir):
            shutil.rmtree(cache_dir)
        os.makedirs(cache_dir)

        group_dir = os.path.join(cache_dir, group)
        product_dir = os.path.join(cache_dir, product)
        release_dir = os.path.join(cache_dir, release)

        for package in checkout_list:
            checkout_package(apiurl, opts.project, package, expand_link=True, prj_dir=cache_dir)

        if not skip_release:
            self.unlink_all_except(release_dir)
        self.unlink_all_except(product_dir)
        self.copy_directory_contents(group_dir, product_dir,
                                     ['supportstatus.txt', 'groups.yml', 'package-groups.changes'])
        self.change_extension(product_dir, '.spec.in', '.spec')

        self.options.input_dir = group_dir
        self.options.output_dir = product_dir
        self.postoptparse()

        print('-> do_update')
        self.do_update('update', opts)

        nonfree = target_config.get('nonfree')
        if nonfree and drop_list:
            print('-> do_update nonfree')

            # Switch to nonfree repo (ugly, but that's how the code was setup).
            self.options.repos_ = self.options.repos
            self.options.repos = ['/'.join([nonfree, main_repo])]
            self.postoptparse()

            opts_nonfree = copy.deepcopy(opts)
            opts_nonfree.project = nonfree
            self.do_update('update', opts_nonfree)

            # Switch repo back to main target project.
            self.options.repos = self.options.repos_
            self.postoptparse()

            print('-> update_merge')
            self.update_merge(nonfree if drop_list else False)

        print('-> do_solve')
        opts.ignore_unresolvable = bool(target_config.get('pkglistgen-ignore-unresolvable'))
        opts.ignore_recommended = bool(target_config.get('pkglistgen-ignore-recommended'))
        opts.include_suggested = bool(target_config.get('pkglistgen-include-suggested'))
        opts.locale = target_config.get('pkglistgen-local')
        opts.locales_from = target_config.get('pkglistgen-locales-from')
        self.do_solve('solve', opts)

        if drop_list:
            # Ensure solv files from all releases in product family are updated.
            print('-> solv_cache_update')
            cache_dir_solv = save_cache_path('opensuse-packagelists', 'solv')
            family_last = target_config.get('pkglistgen-product-family-last')
            family_include = target_config.get('pkglistgen-product-family-include')
            solv_prior = self.solv_cache_update(
                apiurl, cache_dir_solv, target_project, family_last, family_include, opts)

            # Include pre-final release solv files for target project. These
            # files will only exist from previous runs.
            cache_dir_solv_current = os.path.join(cache_dir_solv, target_project)
            solv_prior.update(glob.glob(os.path.join(cache_dir_solv_current, '*.merged.solv')))
            for solv_file in solv_prior:
                logger.debug(solv_file.replace(cache_dir_solv, ''))

            print('-> do_create_droplist')
            # Reset to product after solv_cache_update().
            self.options.output_dir = product_dir
            self.do_create_droplist('create_droplist', opts, *solv_prior)

        delete_products = target_config.get('pkglistgen-delete-products', '').split(' ')
        self.unlink_list(product_dir, delete_products)

        print('-> product service')
        for product_file in glob.glob(os.path.join(product_dir, '*.product')):
            print(subprocess.check_output(
                [PRODUCT_SERVICE, product_file, product_dir, opts.project]))

        delete_kiwis = target_config.get('pkglistgen-delete-kiwis-{}'.format(opts.scope), '').split(' ')
        self.unlink_list(product_dir, delete_kiwis)

        spec_files = glob.glob(os.path.join(product_dir, '*.spec'))
        if skip_release:
            self.unlink_list(None, spec_files)
        else:
            self.move_list(spec_files, release_dir)

        self.multibuild_from_glob(product_dir, '*.kiwi')
        self.build_stub(product_dir, 'kiwi')
        self.commit_package(product_dir)

        if not skip_release:
            self.multibuild_from_glob(release_dir, '*.spec')
            self.build_stub(release_dir, 'spec')
            self.commit_package(release_dir)

    def solv_cache_update(self, apiurl, cache_dir_solv, target_project, family_last, family_include, opts):
        """Dump solv files (do_dump_solv) for all products in family."""
        prior = set()

        project_family = project_list_family_prior(
            apiurl, target_project, include_self=True, last=family_last)
        if family_include:
            # Include projects from a different family if desired.
            project_family.extend(project_list_family(apiurl, family_include))

        for project in project_family:
            config = Config(project)
            project_config = conf.config[project]

            baseurl = project_config.get('download-baseurl')
            if not baseurl:
                logger.warning('no baseurl configured for {}'.format(project))
                continue

            urls = [urlparse.urljoin(baseurl, 'repo/oss/')]
            if project_config.get('nonfree'):
                urls.append(urlparse.urljoin(baseurl, 'repo/non-oss/'))

            names = []
            for url in urls:
                print('-> do_dump_solv for {}/{}'.format(
                    project, os.path.basename(os.path.normpath(url))))
                logger.debug(url)

                self.options.output_dir = os.path.join(cache_dir_solv, project)
                if not os.path.exists(self.options.output_dir):
                    os.makedirs(self.options.output_dir)

                opts.overwrite = False
                names.append(self.do_dump_solv('dump_solv', opts, url))

            if not len(names):
                logger.warning('no solv files were dumped for {}'.format(project))
                continue

            # Merge nonfree solv with free solv or copy free solv as merged.
            merged = names[0].replace('.solv', '.merged.solv')
            if len(names) == 2:
                self.solv_merge(merged, *names)
            else:
                shutil.copyfile(names[0], merged)
            prior.add(merged)

        return prior

    def move_list(self, file_list, destination):
        for name in file_list:
            os.rename(name, os.path.join(destination, os.path.basename(name)))

    def unlink_list(self, path, names):
        for name in names:
            if path is None:
                name_path = name
            else:
                name_path = os.path.join(path, name)

            if os.path.isfile(name_path):
                os.unlink(name_path)

    def unlink_all_except(self, path, ignore_list=['_service'], ignore_hidden=True):
        for name in os.listdir(path):
            if name in ignore_list or (ignore_hidden and name.startswith('.')):
                continue

            name_path = os.path.join(path, name)
            if os.path.isfile(name_path):
                os.unlink(name_path)

    def copy_directory_contents(self, source, destination, ignore_list=[]):
        for name in os.listdir(source):
            name_path = os.path.join(source, name)
            if name in ignore_list or not os.path.isfile(name_path):
                continue

            shutil.copy(name_path, os.path.join(destination, name))

    def change_extension(self, path, original, final):
        for name in glob.glob(os.path.join(path, '*{}'.format(original))):
            # Assumes the extension is only found at the end.
            os.rename(name, name.replace(original, final))

    def multibuild_from_glob(self, destination, pathname):
        root = ET.Element('multibuild')
        for name in glob.glob(os.path.join(destination, pathname)):
            package = ET.SubElement(root, 'package')
            package.text = os.path.splitext(os.path.basename(name))[0]

        with open(os.path.join(destination, '_multibuild'), 'w+b') as f:
            f.write(ET.tostring(root, pretty_print=True))

    def build_stub(self, destination, extension):
        f = file(os.path.join(destination, '.'.join(['stub', extension])), 'w+')
        f.write('# prevent building single {} files twice\n'.format(extension))
        f.write('Name: stub\n')
        f.write('Version: 0.0\n')
        f.close()

    def commit_package(self, path):
        package = Package(path)
        if self.options.dry:
            for i in package.get_diff():
                print(''.join(i))
        else:
            # No proper API function to perform the same operation.
            print(subprocess.check_output(
                ' '.join(['cd', path, '&&', 'osc', 'addremove']), shell=True))
            package.commit(msg='Automatic update')


if __name__ == "__main__":
    app = CommandLineInterface()
    sys.exit(app.main())