#!/usr/bin/python

# TODO: implement equivalent of namespace namespace:language(de) @SYSTEM
# TODO: solve all devel packages to include

from __future__ import print_function

import copy

from lxml import etree as ET
from collections import namedtuple
import sys
import cmdln
import logging
import urllib2
import filecmp
from osc.core import checkout_package
from osc.core import http_GET, http_PUT
from osc.core import makeurl
from osc.core import Package
from osc.core import HTTPError
from osc.core import show_results_meta
from osc.core import undelete_package
from osc import conf
from osclib.cache_manager import CacheManager
from osclib.conf import Config, str2bool
from osclib.core import repository_path_expand
from osclib.core import repository_arch_state
from osclib.core import source_file_ensure
from osclib.core import target_archs
from osclib.stagingapi import StagingAPI
from osclib.util import project_list_family
from osclib.util import project_list_family_prior
import glob
import hashlib
import io
import solv
from pprint import pprint, pformat
import os
import os.path
import subprocess
import re
import yaml
import requests
import urlparse
import gzip
import tempfile
import traceback
import random
import shutil
import string
import time

import ToolBase

# share header cache with repochecker
CACHEDIR = CacheManager.directory('repository-meta')

logger = logging.getLogger()

SCRIPT_PATH = os.path.dirname(os.path.realpath(__file__))
ARCHITECTURES = ['x86_64', 'ppc64le', 's390x', 'aarch64']
PRODUCT_SERVICE = '/usr/lib/obs/service/create_single_product'
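

# A Group represents one package list read from a group*.yml file: it records
# the requested packages per architecture, resolves them with libsolv and
# renders the result as a <group> packagelist via toxml().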
class Group(object):

    def __init__(self, name, pkglist):
        self.name = name
        self.safe_name = re.sub(r'\W', '_', name.lower())
        self.pkglist = pkglist
        self.architectures = pkglist.architectures
        self.conditional = None
        self.packages = dict()
        self.locked = set()
        self.solved_packages = None
        self.solved = False
        self.not_found = dict()
        self.unresolvable = dict()
        self.default_support_status = None
        for a in ARCHITECTURES:
            self.packages[a] = []
            self.unresolvable[a] = dict()

        self.comment = ' ### AUTOMATICALLY GENERATED, DO NOT EDIT ### '
        self.srcpkgs = None
        self.develpkgs = dict()
        self.silents = set()
        self.ignored = set()
        # special feature for SLE. Patterns are marked for expansion
        # of recommended packages, all others aren't. Only works
        # with recommends on actual package names, not virtual
        # provides.
        self.expand_recommended = set()

        pkglist.groups[self.safe_name] = self

    def _add_to_packages(self, package, arch=None):
        archs = self.architectures
        if arch:
            archs = [arch]

        for a in archs:
            self.packages[a].append([package, self.name])
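
    # parse_yml() consumes one group entry from group*.yml. Illustrative shape
    # (the package names below are made up):
    #
    #   somegroup:
    #     - plainpackage              # plain string: wanted on all architectures
    #     - otherpackage:
    #         - locked                # never pull this name into the group
    #         - silent                # solve it, but hide it from the output
    #         - recommended           # expand its recommends explicitly
    #         - x86_64                # any other value restricts it to that arch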
    def parse_yml(self, packages):
        # package less group is a rare exception
        if packages is None:
            return

        for package in packages:
            if not isinstance(package, dict):
                self._add_to_packages(package)
                continue
            name = package.keys()[0]
            for rel in package[name]:
                arch = None
                if rel == 'locked':
                    self.locked.add(name)
                    continue
                elif rel == 'silent':
                    self.silents.add(name)
                elif rel == 'recommended':
                    self.expand_recommended.add(name)
                else:
                    arch = rel

                self._add_to_packages(name, arch)

    def _verify_solved(self):
        if not self.solved:
            raise Exception('group {} not solved'.format(self.name))

    def inherit(self, group):
        for arch in self.architectures:
            self.packages[arch] += group.packages[arch]

        self.locked.update(group.locked)
        self.silents.update(group.silents)
        self.expand_recommended.update(group.expand_recommended)

    # do not repeat packages
    def ignore(self, without):
        for arch in ['*'] + self.pkglist.filtered_architectures:
            s = set(without.solved_packages[arch].keys())
            s |= set(without.solved_packages['*'].keys())
            for p in s:
                self.solved_packages[arch].pop(p, None)
        for p in without.not_found.keys():
            if p not in self.not_found:
                continue
            self.not_found[p] -= without.not_found[p]
            if not self.not_found[p]:
                self.not_found.pop(p)
        for g in without.ignored:
            self.ignore(g)
        self.ignored.add(without)
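
    # solve() drives libsolv: for every filtered architecture it creates install
    # jobs for the group's packages and lock jobs for locked/unwanted names, runs
    # the solver and records the resulting binary and source packages. Packages
    # that show up on every architecture are moved to the pseudo arch '*'.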
    def solve(self, ignore_recommended=False, include_suggested=False):
        """ base: list of base groups or None """

        solved = dict()
        for arch in self.pkglist.filtered_architectures:
            solved[arch] = dict()

        self.srcpkgs = dict()
        self.recommends = dict()
        self.suggested = dict()
        for arch in self.pkglist.filtered_architectures:
            pool = self.pkglist._prepare_pool(arch)
            solver = pool.Solver()
            if ignore_recommended:
                solver.set_flag(solver.SOLVER_FLAG_IGNORE_RECOMMENDED, 1)

            # pool.set_debuglevel(10)
            suggested = []

            # packages resulting from explicit recommended expansion
            extra = []

            def solve_one_package(n, group):
                jobs = list(self.pkglist.lockjobs[arch])
                sel = pool.select(str(n), solv.Selection.SELECTION_NAME)
                if sel.isempty():
                    logger.debug('{}.{}: package {} not found'.format(self.name, arch, n))
                    self.not_found.setdefault(n, set()).add(arch)
                    return
                else:
                    if n in self.expand_recommended:
                        for s in sel.solvables():
                            for dep in s.lookup_deparray(solv.SOLVABLE_RECOMMENDS):
                                # only add recommends that exist as packages
                                rec = pool.select(dep.str(), solv.Selection.SELECTION_NAME)
                                if not rec.isempty():
                                    extra.append([dep.str(), group + ":recommended:" + n])

                    jobs += sel.jobs(solv.Job.SOLVER_INSTALL)

                locked = self.locked | self.pkglist.unwanted
                for l in locked:
                    sel = pool.select(str(l), solv.Selection.SELECTION_NAME)
                    # if we can't find it, it probably is not as important
                    if not sel.isempty():
                        jobs += sel.jobs(solv.Job.SOLVER_LOCK)

                for s in self.silents:
                    sel = pool.select(str(s), solv.Selection.SELECTION_NAME | solv.Selection.SELECTION_FLAT)
                    if sel.isempty():
                        logger.warn('{}.{}: silent package {} not found'.format(self.name, arch, s))
                    else:
                        jobs += sel.jobs(solv.Job.SOLVER_INSTALL)

                problems = solver.solve(jobs)
                if problems:
                    for problem in problems:
                        msg = 'unresolvable: %s.%s: %s' % (self.name, arch, problem)
                        if self.pkglist.ignore_broken:
                            logger.debug(msg)
                        else:
                            logger.debug(msg)
                        self.unresolvable[arch][n] = str(problem)
                    return

                if hasattr(solver, 'get_recommended'):
                    for s in solver.get_recommended():
                        if s.name in locked:
                            continue
                        self.recommends.setdefault(s.name, group + ':' + n)
                    for s in solver.get_suggested():
                        suggested.append([s.name, group + ':suggested:' + n])
                        self.suggested.setdefault(s.name, group + ':' + n)
                else:
                    logger.warn('newer libsolv needed for recommends!')

                trans = solver.transaction()
                if trans.isempty():
                    logger.error('%s.%s: nothing to do', self.name, arch)
                    return

                for s in trans.newsolvables():
                    solved[arch].setdefault(s.name, group + ':' + n)
                    reason, rule = solver.describe_decision(s)
                    if False:
                        print(self.name, s.name, reason, rule.info().problemstr())
                    # don't ask me why, but that's how it seems to work
                    if s.lookup_void(solv.SOLVABLE_SOURCENAME):
                        src = s.name
                    else:
                        src = s.lookup_str(solv.SOLVABLE_SOURCENAME)
                    self.srcpkgs[src] = group + ':' + s.name

            start = time.time()
            for n, group in self.packages[arch]:
                solve_one_package(n, group)
            end = time.time()
            logger.info('%s - solving took %f', self.name, end - start)

            if include_suggested:
                seen = set()
                while suggested:
                    n, group = suggested.pop()
                    if n in seen:
                        continue
                    seen.add(n)
                    solve_one_package(n, group)

        common = None
        # compute common packages across all architectures
        for arch in self.pkglist.filtered_architectures:
            if common is None:
                common = set(solved[arch].keys())
                continue
            common &= set(solved[arch].keys())

        if common is None:
            common = set()

        # reduce arch specific set by common ones
        solved['*'] = dict()
        for arch in self.pkglist.filtered_architectures:
            for p in common:
                solved['*'][p] = solved[arch].pop(p)

        self.solved_packages = solved
        self.solved = True
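
    # check_dups() flags packages that ended up in more than one module: every
    # duplicate is added to the special 'overlap' group together with a comment
    # naming the modules and the reasons that pulled the package in.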
    def check_dups(self, modules, overlap):
        if not overlap:
            return
        packages = set(self.solved_packages['*'])
        for arch in self.pkglist.filtered_architectures:
            packages.update(self.solved_packages[arch])
        for m in modules:
            # do not check with ourselves and only once for the rest
            if m.name <= self.name:
                continue
            if self.name in m.conflicts or m.name in self.conflicts:
                continue
            mp = set(m.solved_packages['*'])
            for arch in self.pkglist.filtered_architectures:
                mp.update(m.solved_packages[arch])
            if len(packages & mp):
                overlap.comment += '\n overlapping between ' + self.name + ' and ' + m.name + "\n"
                for p in sorted(packages & mp):
                    for arch in m.solved_packages.keys():
                        if m.solved_packages[arch].get(p, None):
                            overlap.comment += " # " + m.name + "." + arch + ': ' + m.solved_packages[arch][p] + "\n"
                        if self.solved_packages[arch].get(p, None):
                            overlap.comment += " # " + self.name + "." + \
                                arch + ': ' + self.solved_packages[arch][p] + "\n"
                    overlap.comment += ' - ' + p + "\n"
                    overlap._add_to_packages(p)
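
    # collect_devel_packages() records every *-devel binary whose source package
    # was pulled into this group, so _find_reason() can later point unsorted
    # packages back at the group that owns their source.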
    def collect_devel_packages(self):
        for arch in self.pkglist.filtered_architectures:
            pool = self.pkglist._prepare_pool(arch)
            sel = pool.Selection()
            for s in pool.solvables_iter():
                if s.name.endswith('-devel'):
                    # don't ask me why, but that's how it seems to work
                    if s.lookup_void(solv.SOLVABLE_SOURCENAME):
                        src = s.name
                    else:
                        src = s.lookup_str(solv.SOLVABLE_SOURCENAME)

                    if src in self.srcpkgs.keys():
                        self.develpkgs[s.name] = self.srcpkgs[src]

    def _filter_already_selected(self, modules, pkgdict):
        # erase our own - so we don't filter our own
        for p in pkgdict.keys():
            already_present = False
            for m in modules:
                for arch in ['*'] + self.pkglist.filtered_architectures:
                    already_present = already_present or (p in m.solved_packages[arch])
            if already_present:
                del pkgdict[p]

    def filter_already_selected(self, modules):
        self._filter_already_selected(modules, self.recommends)
        self._filter_already_selected(modules, self.suggested)

    def toxml(self, arch, ignore_broken=False, comment=None):
        packages = self.solved_packages.get(arch, dict())

        name = self.name
        if arch != '*':
            name += '.' + arch

        root = ET.Element('group', {'name': name})
        if comment:
            c = ET.Comment(comment)
            root.append(c)

        if arch != '*':
            cond = ET.SubElement(root, 'conditional', {
                'name': 'only_{}'.format(arch)})
        packagelist = ET.SubElement(
            root, 'packagelist', {'relationship': 'recommends'})

        missing = dict()
        if arch == '*':
            missing = self.not_found
        unresolvable = self.unresolvable.get(arch, dict())
        for name in sorted(packages.keys() + missing.keys() + unresolvable.keys()):
            if name in self.silents:
                continue
            if name in missing:
                msg = ' {} not found on {}'.format(name, ','.join(sorted(missing[name])))
                if ignore_broken:
                    c = ET.Comment(msg)
                    packagelist.append(c)
                    continue
                name = msg
            if name in unresolvable:
                msg = ' {} uninstallable: {}'.format(name, unresolvable[name])
                if ignore_broken:
                    c = ET.Comment(msg)
                    packagelist.append(c)
                    continue
                else:
                    logger.error(msg)
                    name = msg
            status = self.pkglist.supportstatus(name) or self.default_support_status
            attrs = {'name': name}
            if status is not None:
                attrs['supportstatus'] = status
            p = ET.SubElement(packagelist, 'package', attrs)
            if name in packages and packages[name]:
                c = ET.Comment(' reason: {} '.format(packages[name]))
                packagelist.append(c)

        return root

    # just list all packages in it as an array - to be output as one yml
    def summary(self):
        ret = set()
        for arch in ['*'] + self.pkglist.filtered_architectures:
            ret |= set(self.solved_packages[arch].keys())
        return ret

    def dump(self):
        pprint({'name': self.name, 'missing': self.not_found, 'packages': self.packages,
                'solved': self.solved_packages, 'silents': self.silents})
        return
        archs = ['*'] + self.pkglist.filtered_architectures
        for arch in archs:
            x = self.toxml(arch)
            print(ET.tostring(x, pretty_print=True))


class PkgListGen(ToolBase.ToolBase):

    def __init__(self):
        ToolBase.ToolBase.__init__(self)
        # package -> supportstatus
        self.packages = dict()
        self.groups = dict()
        self._supportstatus = None
        self.input_dir = '.'
        self.output_dir = '.'
        self.lockjobs = dict()
        self.ignore_broken = False
        self.ignore_recommended = False
        self.include_suggested = False
        self.unwanted = set()
        self.output = None
        self.locales = set()
        self.did_update = False
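
    # supportstatus.txt is expected to hold one "<package> <status>" pair per
    # line (e.g. "somepackage l3" - the value here is only illustrative); the
    # status is emitted as the supportstatus attribute in toxml().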
    def _load_supportstatus(self):
        # XXX
        fn = os.path.join(self.input_dir, 'supportstatus.txt')
        self._supportstatus = dict()
        if os.path.exists(fn):
            with open(fn, 'r') as fh:
                for l in fh:
                    # pkg, status
                    a = l.rstrip().split(' ')
                    if len(a) > 1:
                        self._supportstatus[a[0]] = a[1]

    def supportstatus(self, package):
        if self._supportstatus is None:
            self._load_supportstatus()

        return self._supportstatus.get(package)
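
    # Group files are YAML mappings; the special top-level keys OUTPUT and
    # UNWANTED are returned to the caller, every other key defines a Group.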
    def _load_group_file(self, fn):
        output = None
        unwanted = None
        with open(fn, 'r') as fh:
            logger.debug("reading %s", fn)
            for groupname, group in yaml.safe_load(fh).items():
                if groupname == 'OUTPUT':
                    output = group
                    continue
                if groupname == 'UNWANTED':
                    unwanted = set(group)
                    continue
                g = Group(groupname, self)
                g.parse_yml(group)
        return output, unwanted

    def load_all_groups(self):
        for fn in glob.glob(os.path.join(self.input_dir, 'group*.yml')):
            o, u = self._load_group_file(fn)
            if o:
                if self.output is not None:
                    raise Exception('OUTPUT defined multiple times')
                self.output = o
            if u:
                self.unwanted |= u

    def _write_all_groups(self):
        self._check_supplements()
        summary = dict()
        archs = ['*'] + self.architectures
        for name in self.groups:
            group = self.groups[name]
            if not group.solved:
                continue
            summary[name] = group.summary()
            fn = '{}.group'.format(group.name)
            with open(os.path.join(self.output_dir, fn), 'w') as fh:
                comment = group.comment
                for arch in archs:
                    x = group.toxml(arch, self.ignore_broken, comment)
                    # only comment first time
                    comment = None
                    x = ET.tostring(x, pretty_print=True)
                    x = re.sub(r'\s*<!-- reason:', ' <!-- reason:', x)
                    # fh.write(ET.tostring(x, pretty_print = True, doctype = '<?xml version="1.0" encoding="UTF-8"?>'))
                    fh.write(x)
        return summary

    def _parse_product(self, root):
        print(root.find('.//products/product/name').text)
        for mnode in root.findall(".//mediasets/media"):
            name = mnode.get('name')
            print(' {}'.format(name))
            for node in mnode.findall(".//use"):
                print(' {}'.format(node.get('group')))

    def list_products(self):
        for fn in glob.glob('*.product'):
            with open(os.path.join(self.input_dir, fn), 'r') as fh:
                logger.debug("reading %s", fn)
                root = ET.parse(fh).getroot()
                self._parse_product(root)

    def solve_module(self, groupname, includes, excludes):
        g = self.groups[groupname]
        for i in includes:
            g.inherit(self.groups[i])
        g.solve(self.ignore_recommended, self.include_suggested)
        for e in excludes:
            g.ignore(self.groups[e])

    def expand_repos(self, project, repo='standard'):
        return repository_path_expand(self.apiurl, project, repo)

    def _check_supplements(self):
        tocheck = set()
        tocheck_locales = set()
        for arch in self.filtered_architectures:
            pool = self._prepare_pool(arch)
            sel = pool.Selection()
            for s in pool.solvables_iter():
                sel.add_raw(solv.Job.SOLVER_SOLVABLE, s.id)

            for s in sel.solvables():
                for dep in s.lookup_deparray(solv.SOLVABLE_SUPPLEMENTS):
                    for d in dep.str().split(' '):
                        if d.startswith('namespace:modalias') or d.startswith('namespace:filesystem'):
                            tocheck.add(s.name)

            for l in self.locales:
                i = pool.str2id('locale({})'.format(l))
                for s in pool.whatprovides(i):
                    tocheck_locales.add(s.name)

        all_grouped = set()
        for g in self.groups.values():
            if g.solved:
                for arch in g.solved_packages.keys():
                    if g.solved_packages[arch]:
                        all_grouped.update(g.solved_packages[arch])

        for p in tocheck - all_grouped:
            logger.warn('package %s has supplements but is not grouped', p)

        for p in tocheck_locales - all_grouped:
            logger.warn('package %s provides supported locale but is not grouped', p)
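
    # _prepare_pool() builds a libsolv pool from the cached repo-*.solv files
    # for one architecture. If the same package name appears in more than one
    # repository, the later occurrences are turned into lock jobs so only the
    # first repository wins.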
    def _prepare_pool(self, arch):
        pool = solv.Pool()
        pool.setarch(arch)

        self.lockjobs[arch] = []
        solvables = set()

        for project, reponame in self.repos:
            repo = pool.add_repo(project)
            s = os.path.join(CACHEDIR, 'repo-{}-{}-{}.solv'.format(project, reponame, arch))
            r = repo.add_solv(s)
            if not r:
                if not self.did_update:
                    raise Exception(
                        "failed to add repo {}/{}/{}. Need to run update first?".format(project, reponame, arch))
                continue
            for solvable in repo.solvables_iter():
                if solvable.name in solvables:
                    self.lockjobs[arch].append(pool.Job(solv.Job.SOLVER_SOLVABLE | solv.Job.SOLVER_LOCK, solvable.id))
                solvables.add(solvable.name)

        pool.addfileprovides()
        pool.createwhatprovides()

        # https://github.com/openSUSE/libsolv/issues/231
        if hasattr(pool, 'set_namespaceproviders'):
            for l in self.locales:
                pool.set_namespaceproviders(solv.NAMESPACE_LANGUAGE, pool.Dep(l), True)
        else:
            logger.warn('libsolv missing set_namespaceproviders()')

        return pool
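
    # unneeded.yml is expected to be a YAML mapping of arbitrary group names to
    # lists of regular expressions; all lists are merged into one set. A made-up
    # example:
    #
    #   debug:
    #     - '.*-debuginfo'
    #     - '.*-debugsource'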
    # parse file and merge all groups
    def _parse_unneeded(self, filename):
        filename = os.path.join(self.input_dir, filename)
        if not os.path.isfile(filename):
            return set()
        fh = open(filename, 'r')
        logger.debug("reading %s", filename)
        result = set()
        for groupname, group in yaml.safe_load(fh).items():
            result.update(group)
        return result

    # the unsorted group is special and will contain all the rest for
    # the FTP tree. We filter it with unneeded though to create a
    # unsorted.yml file for release manager review
    def _collect_unsorted_packages(self, modules, unsorted):
        unneeded_regexps = [re.compile(r)
                            for r in self._parse_unneeded('unneeded.yml')]

        packages = dict()
        if unsorted:
            unsorted.solved_packages = dict()
            unsorted.solved_packages['*'] = dict()

        for arch in self.filtered_architectures:
            pool = self._prepare_pool(arch)
            sel = pool.Selection()
            archpacks = [s.name for s in pool.solvables_iter()]

            # copy
            filtered = list(archpacks)
            for r in unneeded_regexps:
                filtered = [p for p in filtered if not r.match(p)]

            # convert to set
            filtered = set(filtered) - self.unwanted
            for g in modules:
                if unsorted and g == unsorted:
                    continue
                for a in ('*', arch):
                    filtered -= set(g.solved_packages[a].keys())
            for package in filtered:
                packages.setdefault(package, []).append(arch)

            if unsorted:
                archpacks = set(archpacks)
                unsorted.solved_packages[arch] = dict()
                for g in modules:
                    archpacks -= set(g.solved_packages[arch].keys())
                    archpacks -= set(g.solved_packages['*'].keys())
                unsorted.solved_packages[arch] = dict()
                for p in archpacks:
                    unsorted.solved_packages[arch][p] = None

        if unsorted:
            common = None
            for arch in self.filtered_architectures:
                if common is None:
                    common = set(unsorted.solved_packages[arch].keys())
                    continue
                common &= set(unsorted.solved_packages[arch].keys())
            for p in common:
                unsorted.solved_packages['*'][p] = None
                for arch in self.filtered_architectures:
                    del unsorted.solved_packages[arch][p]

        with open(os.path.join(self.output_dir, 'unsorted.yml'), 'w') as fh:
            fh.write("unsorted:\n")
            for p in sorted(packages.keys()):
                fh.write(" - ")
                fh.write(p)
                if len(packages[p]) != len(self.filtered_architectures):
                    fh.write(": [")
                    fh.write(','.join(sorted(packages[p])))
                    fh.write("]")
                reason = self._find_reason(p, modules)
                if reason:
                    fh.write(' # ' + reason)
                fh.write(" \n")

    # give a hint if the package is related to a group
    def _find_reason(self, package, modules):
        # go through the modules multiple times to find the "best"
        for g in modules:
            if package in g.recommends:
                return 'recommended by ' + g.recommends[package]
        for g in modules:
            if package in g.suggested:
                return 'suggested by ' + g.suggested[package]
        for g in modules:
            if package in g.develpkgs:
                return 'devel package of ' + g.develpkgs[package]
        return None


class CommandLineInterface(ToolBase.CommandLineInterface):
    SCOPES = ['all', 'target', 'rings', 'staging', 'arm']

    def __init__(self, *args, **kwargs):
        ToolBase.CommandLineInterface.__init__(self, args, kwargs)
        self.repos = []

    def get_optparser(self):
        parser = ToolBase.CommandLineInterface.get_optparser(self)
        parser.add_option('-i', '--input-dir', dest='input_dir', metavar='DIR',
                          help='input directory', default='.')
        parser.add_option('-o', '--output-dir', dest='output_dir', metavar='DIR',
                          help='output directory', default='.')
        parser.add_option('-a', '--architecture', dest='architectures', metavar='ARCH',
                          help='architecture', action='append')
        return parser

    def setup_tool(self):
        tool = PkgListGen()
        tool.input_dir = self.options.input_dir
        tool.output_dir = self.options.output_dir
        tool.repos = self.repos
        if self.options.architectures:
            tool.architectures = self.options.architectures
        else:
            tool.architectures = ARCHITECTURES
        return tool

    def do_list(self, subcmd, opts):
        """${cmd_name}: list all groups

        ${cmd_usage}
        ${cmd_option_list}
        """

        self.tool.load_all_groups()

        for name in sorted(self.tool.groups.keys()):
            print(name)

    def do_list_products(self, subcmd, opts):
        """${cmd_name}: list all products

        ${cmd_usage}
        ${cmd_option_list}
        """

        self.tool.list_products()
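
    # do_update() mirrors the binary RPMs of every configured repository with
    # the bs_mirrorfull helper script and converts each download directory into
    # a repo-<project>-<repo>-<arch>.solv file via rpms2solv. A repository state
    # hash stored next to the solv file lets unchanged repos be skipped.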
    def do_update(self, subcmd, opts):
        """${cmd_name}: Update cached repositories and generate solv files

        ${cmd_usage}
        ${cmd_option_list}
        """

        # only there to parse the repos
        bs_mirrorfull = os.path.join(SCRIPT_PATH, 'bs_mirrorfull')
        global_update = False
        for project, repo in self.repos:
            for arch in opts.filtered_architectures:
                # TODO: refactor to common function with repo_checker.py
                d = os.path.join(CACHEDIR, project, repo, arch)
                if not os.path.exists(d):
                    os.makedirs(d)

                try:
                    # Fetch state before mirroring in-case it changes during download.
                    state = repository_arch_state(self.tool.apiurl, project, repo, arch)
                except HTTPError:
                    continue

                # Would be preferable to include hash in name, but cumbersome to handle without
                # reworking a fair bit since the state needs to be tracked.
                solv_file = os.path.join(CACHEDIR, 'repo-{}-{}-{}.solv'.format(project, repo, arch))
                solv_file_hash = '{}::{}'.format(solv_file, state)
                if os.path.exists(solv_file) and os.path.exists(solv_file_hash):
                    # Solv file exists and hash unchanged, skip updating solv.
                    logger.debug('skipping solv generation for {} due to matching state {}'.format(
                        '/'.join([project, repo, arch]), state))
                    continue

                # Either hash changed or new, so remove any old hash files.
                self.unlink_list(None, glob.glob(solv_file + '::*'))
                global_update = True

                logger.debug('updating %s', d)
                args = [bs_mirrorfull]
                args.append('--nodebug')
                args.append('{}/public/build/{}/{}/{}'.format(self.tool.apiurl, project, repo, arch))
                args.append(d)
                p = subprocess.Popen(args, stdout=subprocess.PIPE)
                for line in p.stdout:
                    logger.info(line.rstrip())

                files = [os.path.join(d, f)
                         for f in os.listdir(d) if f.endswith('.rpm')]
                fh = open(solv_file, 'w')
                p = subprocess.Popen(
                    ['rpms2solv', '-m', '-', '-0'], stdin=subprocess.PIPE, stdout=fh)
                p.communicate('\0'.join(files))
                p.wait()
                fh.close()

                # Create hash file now that solv creation is complete.
                open(solv_file_hash, 'a').close()
                self.tool.did_update = True

        return global_update
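
    # update_merge()/solv_merge() combine the free and nonfree solv files into
    # repo-*.merged.solv via mergesolv, or simply copy the free solv file when
    # no nonfree project is given.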
    def update_merge(self, nonfree):
        """Merge free and nonfree solv files or copy free to merged"""
        for project, repo in self.repos:
            for arch in self.tool.architectures:
                solv_file = os.path.join(
                    CACHEDIR, 'repo-{}-{}-{}.solv'.format(project, repo, arch))
                solv_file_merged = os.path.join(
                    CACHEDIR, 'repo-{}-{}-{}.merged.solv'.format(project, repo, arch))

                if not nonfree:
                    shutil.copyfile(solv_file, solv_file_merged)
                    continue

                solv_file_nonfree = os.path.join(
                    CACHEDIR, 'repo-{}-{}-{}.solv'.format(nonfree, repo, arch))
                self.solv_merge(solv_file_merged, solv_file, solv_file_nonfree)

    def solv_merge(self, solv_merged, *solvs):
        solvs = list(solvs)  # From tuple.

        if os.path.exists(solv_merged):
            modified = map(os.path.getmtime, [solv_merged] + solvs)
            if max(modified) <= modified[0]:
                # The inputs were modified before or at the same time as the merged file.
                logger.debug('merge skipped for {}'.format(solv_merged))
                return

        with open(solv_merged, 'w') as handle:
            p = subprocess.Popen(['mergesolv'] + solvs, stdout=handle)
            p.communicate()

        if p.returncode:
            raise Exception('failed to create merged solv file')
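
    # do_create_sle_weakremovers() compares the solv files of the given projects
    # and prints spec snippets of the form "Provides: weakremover(<package>)",
    # wrapped in %ifarch/%endif when a package only dropped out on a subset of
    # the architectures.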
    def do_create_sle_weakremovers(self, subcmd, opts, *prjs):
        for prj in prjs:
            logger.debug("processing %s", prj)
            self.tool.expand_repos(prj, 'standard')
            opts.project = prj
            self.do_update('update', opts)

        drops = dict()
        for arch in self.tool.architectures:
            pool = solv.Pool()
            pool.setarch(arch)

            sysrepo = None
            for prp in prjs:
                fn = os.path.join(CACHEDIR, 'repo-{}-{}-{}.solv'.format(prp, 'standard', arch))
                r = pool.add_repo('/'.join([prp, 'standard']))
                r.add_solv(fn)
                if not sysrepo:
                    sysrepo = r

            pool.createwhatprovides()

            for s in pool.solvables_iter():
                if s.repo == sysrepo or not (s.arch == 'noarch' or s.arch == arch):
                    continue
                haveit = False
                for s2 in pool.whatprovides(s.nameid):
                    if s2.repo == sysrepo and s.nameid == s2.nameid:
                        haveit = True
                if haveit:
                    continue
                nevr = pool.rel2id(s.nameid, s.evrid, solv.REL_EQ)
                for s2 in pool.whatmatchesdep(solv.SOLVABLE_OBSOLETES, nevr):
                    if s2.repo == sysrepo:
                        continue
                    haveit = True
                if haveit:
                    continue
                if s.name not in drops:
                    drops[s.name] = {'repo': s.repo.name, 'archs': []}
                if arch not in drops[s.name]['archs']:
                    drops[s.name]['archs'].append(arch)
        for prp in prjs:
            exclusives = dict()
            print('#', prp)
            for name in sorted(drops.keys()):
                if drops[name]['repo'] != prp:
                    continue
                if len(drops[name]['archs']) == len(self.tool.architectures):
                    print('Provides: weakremover({})'.format(name))
                else:
                    jarch = ' '.join(sorted(drops[name]['archs']))
                    exclusives.setdefault(jarch, []).append(name)
            for arch in sorted(exclusives.keys()):
                print('%ifarch {}'.format(arch))
                for name in sorted(exclusives[arch]):
                    print('Provides: weakremover({})'.format(name))
                print('%endif')

    def do_create_droplist(self, subcmd, opts, *oldsolv):
        """${cmd_name}: generate list of obsolete packages

        The globally specified repositories are taken as the current
        package set. All solv files specified on the command line
        are old versions of those repos.

        The command outputs all package names that are no longer
        contained in or provided by the current repos.

        ${cmd_usage}
        ${cmd_option_list}
        """

        drops = dict()

        for arch in self.tool.architectures:
            for old in oldsolv:
                logger.debug("%s: processing %s", arch, old)

                pool = solv.Pool()
                pool.setarch(arch)

                for project, repo in self.tool.repos:
                    fn = os.path.join(CACHEDIR, 'repo-{}-{}-{}.solv'.format(project, repo, arch))
                    r = pool.add_repo(project)
                    r.add_solv(fn)

                sysrepo = pool.add_repo(os.path.basename(old).replace('.merged.solv', ''))
                sysrepo.add_solv(old)

                pool.createwhatprovides()

                for s in sysrepo.solvables:
                    haveit = False
                    for s2 in pool.whatprovides(s.nameid):
                        if s2.repo == sysrepo or s.nameid != s2.nameid:
                            continue
                        haveit = True
                    if haveit:
                        continue
                    nevr = pool.rel2id(s.nameid, s.evrid, solv.REL_EQ)
                    for s2 in pool.whatmatchesdep(solv.SOLVABLE_OBSOLETES, nevr):
                        if s2.repo == sysrepo:
                            continue
                        haveit = True
                    if haveit:
                        continue
                    if s.name not in drops:
                        drops[s.name] = sysrepo.name

                # mark it explicitly to avoid having 2 pools while GC is not run
                del pool

        ofh = sys.stdout
        if self.options.output_dir:
            name = os.path.join(self.options.output_dir, 'obsoletepackages.inc')
            ofh = open(name, 'w')

        for reponame in sorted(set(drops.values())):
            print("<!-- %s -->" % reponame, file=ofh)
            for p in sorted(drops):
                if drops[p] != reponame:
                    continue
                print(" <obsoletepackage>%s</obsoletepackage>" % p, file=ofh)
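
    # do_dump_solv() fetches repodata/repomd.xml and the referenced primary file
    # from a published repository, verifies the sha256 checksum and converts the
    # metadata into a solv file named after the media build (or the repomd
    # checksum for update repositories).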
    @cmdln.option('--overwrite', action='store_true', help='overwrite if output file exists')
    def do_dump_solv(self, subcmd, opts, baseurl):
        """${cmd_name}: fetch repomd and dump solv

        Dumps solv from published repository. Use solve to generate from
        pre-published repository.

        If an output directory is specified, a file named according
        to the build is created there. Otherwise the solv file is
        dumped to stdout.

        ${cmd_usage}
        ${cmd_option_list}
        """

        name = None
        ofh = sys.stdout
        if self.options.output_dir:
            build, repo_style = self.dump_solv_build(baseurl)
            name = os.path.join(self.options.output_dir, '{}.solv'.format(build))
            # For update repo name never changes so always update.
            if not opts.overwrite and repo_style != 'update' and os.path.exists(name):
                logger.info("%s exists", name)
                return name

        pool = solv.Pool()
        pool.setarch()

        repo = pool.add_repo(''.join(random.choice(string.letters) for _ in range(5)))
        path_prefix = 'suse/' if name and repo_style == 'build' else ''
        url = urlparse.urljoin(baseurl, path_prefix + 'repodata/repomd.xml')
        repomd = requests.get(url)
        ns = {'r': 'http://linux.duke.edu/metadata/repo'}
        root = ET.fromstring(repomd.content)
        primary_element = root.find('.//r:data[@type="primary"]', ns)
        location = primary_element.find('r:location', ns).get('href')
        sha256_expected = primary_element.find('r:checksum[@type="sha256"]', ns).text

        # No build information in update repo, so use the repomd checksum in the name.
        if repo_style == 'update':
            name = os.path.join(self.options.output_dir, '{}::{}.solv'.format(build, sha256_expected))
            if not opts.overwrite and os.path.exists(name):
                logger.info("%s exists", name)
                return name

            # Only consider latest update repo so remove old versions.
            # Pre-release builds only make sense for non-update repos and, once
            # released, are then only relevant for the next product which does
            # not consider pre-release from the previous version.
            for old_solv in glob.glob(os.path.join(self.options.output_dir, '{}::*.solv'.format(build))):
                os.remove(old_solv)

        f = tempfile.TemporaryFile()
        f.write(repomd.content)
        f.flush()
        os.lseek(f.fileno(), 0, os.SEEK_SET)
        repo.add_repomdxml(f, 0)
        url = urlparse.urljoin(baseurl, path_prefix + location)
        with requests.get(url, stream=True) as primary:
            sha256 = hashlib.sha256(primary.content).hexdigest()
            if sha256 != sha256_expected:
                raise Exception('checksums do not match {} != {}'.format(sha256, sha256_expected))

            content = gzip.GzipFile(fileobj=io.BytesIO(primary.content))
            os.lseek(f.fileno(), 0, os.SEEK_SET)
            f.write(content.read())
            f.flush()
            os.lseek(f.fileno(), 0, os.SEEK_SET)
            repo.add_rpmmd(f, None, 0)
            repo.create_stubs()

            ofh = open(name + '.new', 'w')
            repo.write(ofh)

        if name is not None:
            # Only update file if overwrite or different.
            ofh.flush()  # Ensure entirely written before comparing.
            if not opts.overwrite and os.path.exists(name) and filecmp.cmp(name + '.new', name, shallow=False):
                logger.debug('file identical, skip dumping')
                os.remove(name + '.new')
            else:
                os.rename(name + '.new', name)
        return name

    def dump_solv_build(self, baseurl):
        """Determine repo format and build string from remote repository."""
        if 'update' in baseurl:
            # Could look at .repo file or repomd.xml, but larger change.
            return 'update-' + os.path.basename(os.path.normpath(baseurl)), 'update'

        url = urlparse.urljoin(baseurl, 'media.1/media')
        with requests.get(url) as media:
            for i, line in enumerate(media.iter_lines()):
                if i != 1:
                    continue
                name = line

        if name is not None and '-Build' in name:
            return name, 'media'

        url = urlparse.urljoin(baseurl, 'media.1/build')
        with requests.get(url) as build:
            name = build.content.strip()

        if name is not None and '-Build' in name:
            return name, 'build'

        raise Exception('media.1/{media,build} includes no build number')

    @cmdln.option('--ignore-unresolvable', action='store_true', help='ignore unresolvable and missing packages')
    @cmdln.option('--ignore-recommended', action='store_true', help='do not include recommended packages automatically')
    @cmdln.option('--include-suggested', action='store_true', help='include suggested packages also')
    @cmdln.option('--locale', action='append', help='locales to include')
    @cmdln.option('--locales-from', metavar='FILE', help='get supported locales from product file FILE')
    def do_solve(self, subcmd, opts):
        """${cmd_name}: Solve groups

        Generates solv from pre-published repository contained in local cache.
        Use dump_solv to extract solv from published repository.

        ${cmd_usage}
        ${cmd_option_list}
        """

        self.tool.load_all_groups()
        if not self.tool.output:
            logger.error('OUTPUT not defined')
            return

        if opts.ignore_unresolvable:
            self.tool.ignore_broken = True
        if opts.ignore_recommended:
            self.tool.ignore_recommended = True
        if opts.include_suggested:
            if opts.ignore_recommended:
                raise cmdln.CmdlnUserError("--ignore-recommended and --include-suggested don't work together")
            self.tool.include_suggested = True
        if opts.locale:
            for l in opts.locale:
                self.tool.locales |= set(l.split(','))
        if opts.locales_from:
            with open(os.path.join(self.tool.input_dir, opts.locales_from), 'r') as fh:
                root = ET.parse(fh).getroot()
                self.tool.locales |= set([lang.text for lang in root.findall(".//linguas/language")])
        self.tool.filtered_architectures = opts.filtered_architectures

        modules = []
        # the yml parser makes an array out of everything, so
        # we loop a bit more than what we support
        for group in self.tool.output:
            groupname = group.keys()[0]
            settings = group[groupname]
            if not settings:  # e.g. unsorted
                settings = {}
            includes = settings.get('includes', [])
            excludes = settings.get('excludes', [])
            self.tool.solve_module(groupname, includes, excludes)
            g = self.tool.groups[groupname]
            g.conflicts = settings.get('conflicts', [])
            g.default_support_status = settings.get('default-support', 'unsupported')
            modules.append(g)

        # not defined for openSUSE
        overlap = self.tool.groups.get('overlap')
        for module in modules:
            module.check_dups(modules, overlap)
            module.collect_devel_packages()
            module.filter_already_selected(modules)

        if overlap:
            ignores = [x.name for x in overlap.ignored]
            self.tool.solve_module(overlap.name, [], ignores)
            overlapped = set(overlap.solved_packages['*'])
            for arch in self.tool.filtered_architectures:
                overlapped |= set(overlap.solved_packages[arch])
            for module in modules:
                if module.name == 'overlap' or module in overlap.ignored:
                    continue
                for arch in ['*'] + self.tool.filtered_architectures:
                    for p in overlapped:
                        module.solved_packages[arch].pop(p, None)

        self.tool._collect_unsorted_packages(modules, self.tool.groups.get('unsorted'))
        return self.tool._write_all_groups()
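
    # do_update_and_solve() expands the requested scope list ('all' stands for
    # every scope, 'staging:<letter>' selects a single staging project) and runs
    # real_update_and_solve() once per scope.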
|
2017-09-01 16:08:47 +02:00
|
|
|
|
2017-12-21 00:32:20 -06:00
|
|
|
@cmdln.option('-f', '--force', action='store_true', help='continue even if build is in progress')
|
|
|
|
@cmdln.option('-p', '--project', help='target project')
|
2018-07-31 14:52:10 +02:00
|
|
|
@cmdln.option('-s', '--scope', action='append', default=['all'], help='scope on which to operate ({}, staging:$letter)'.format(', '.join(SCOPES)))
|
2018-07-03 04:17:54 +02:00
|
|
|
@cmdln.option('--no-checkout', action='store_true', help='reuse checkout in cache')
|
2018-07-03 09:30:14 +02:00
|
|
|
@cmdln.option('--stop-after-solve', action='store_true', help='only create group files')
|
2017-12-21 00:32:20 -06:00
|
|
|
def do_update_and_solve(self, subcmd, opts):
|
|
|
|
"""${cmd_name}: update and solve for given scope
|
|
|
|
|
|
|
|
${cmd_usage}
|
|
|
|
${cmd_option_list}
|
|
|
|
"""
|
|
|
|
|
2018-04-11 23:51:49 -05:00
|
|
|
self.error_occured = False
|
|
|
|
|
2017-12-21 00:32:20 -06:00
|
|
|
if not opts.project:
|
|
|
|
raise ValueError('project is required')
|
2018-07-05 10:48:50 +02:00
|
|
|
opts.staging_project = None
|
2017-12-21 00:32:20 -06:00
|
|
|
|
2018-07-31 14:52:10 +02:00
|
|
|
# special case for all
|
|
|
|
if opts.scope == ['all']:
|
|
|
|
opts.scope = self.SCOPES[1:]
|
|
|
|
|
|
|
|
for scope in opts.scope:
|
|
|
|
if scope.startswith('staging:'):
|
|
|
|
opts.staging_project = re.match('staging:(.*)', scope).group(1)
|
|
|
|
opts.staging_project = opts.staging_project.upper()
|
|
|
|
scope = 'staging'
|
|
|
|
if scope not in self.SCOPES:
|
|
|
|
raise ValueError('scope "{}" must be one of: {}'.format(scope, ', '.join(self.SCOPES)))
|
|
|
|
opts.scope = scope
|
|
|
|
self.real_update_and_solve(copy.deepcopy(opts))
|
|
|
|
return self.error_occured
|
|
|
|
|
|
|
|
# note: scope is a single value here, while it is a list in do_update_and_solve above
|
|
|
|
def real_update_and_solve(self, opts):
|
2017-12-21 00:32:20 -06:00
|
|
|
# Store target project as opts.project will contain subprojects.
|
|
|
|
target_project = opts.project
|
|
|
|
|
|
|
|
apiurl = conf.config['apiurl']
|
2018-08-16 21:46:05 -05:00
|
|
|
config = Config(apiurl, target_project)
|
2017-12-21 00:32:20 -06:00
|
|
|
api = StagingAPI(apiurl, target_project)
|
|
|
|
|
|
|
|
target_config = conf.config[target_project]
|
2018-07-10 13:26:02 +02:00
|
|
|
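# The ports and arm scopes may override the architecture list via their own
# configuration keys; everything else uses the default pkglistgen-archs key.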
if opts.scope == 'ports':
|
|
|
|
archs_key = 'pkglistgen-archs-ports'
|
|
|
|
elif opts.scope == 'arm':
|
|
|
|
archs_key = 'pkglistgen-archs-arm'
|
|
|
|
else:
|
|
|
|
archs_key = 'pkglistgen-archs'
|
|
|
|
|
2017-12-21 00:32:20 -06:00
|
|
|
if archs_key in target_config:
|
|
|
|
self.options.architectures = target_config.get(archs_key).split(' ')
|
|
|
|
main_repo = target_config['main-repo']
|
|
|
|
|
|
|
|
if opts.scope == 'target':
|
2018-07-02 20:36:50 +02:00
|
|
|
self.repos = self.tool.expand_repos(target_project, main_repo)
|
2018-07-16 15:16:33 +02:00
|
|
|
self.update_and_solve_target_wrapper(api, target_project, target_config, main_repo, opts, drop_list=True)
|
2018-04-11 23:51:49 -05:00
|
|
|
return self.error_occured
|
2018-07-10 13:26:02 +02:00
|
|
|
elif opts.scope == 'arm':
|
|
|
|
main_repo = 'ports'
|
|
|
|
opts.project += ':ARM'
|
|
|
|
self.repos = self.tool.expand_repos(opts.project, main_repo)
|
2018-08-02 17:05:16 +08:00
|
|
|
self.update_and_solve_target_wrapper(api, target_project, target_config, main_repo, opts, drop_list=True)
|
2018-07-10 13:26:02 +02:00
|
|
|
return self.error_occured
|
2017-12-21 00:32:20 -06:00
|
|
|
elif opts.scope == 'ports':
|
|
|
|
# TODO Continue supporting #1297, but should be abstracted.
|
|
|
|
main_repo = 'ports'
|
|
|
|
opts.project += ':Ports'
|
2018-07-02 20:36:50 +02:00
|
|
|
self.repos = self.tool.expand_repos(opts.project, main_repo)
|
2018-07-16 15:16:33 +02:00
|
|
|
self.update_and_solve_target_wrapper(api, target_project, target_config, main_repo, opts, drop_list=True)
|
2018-04-11 23:51:49 -05:00
|
|
|
return self.error_occured
|
2017-12-21 00:32:20 -06:00
|
|
|
elif opts.scope == 'rings':
|
|
|
|
opts.project = api.rings[1]
|
2018-07-02 20:36:50 +02:00
|
|
|
self.repos = self.tool.expand_repos(api.rings[1], main_repo)
|
2018-07-16 15:16:33 +02:00
|
|
|
self.update_and_solve_target_wrapper(api, target_project, target_config, main_repo, opts)
|
2018-04-11 23:51:49 -05:00
|
|
|
return self.error_occured
|
2017-12-21 00:32:20 -06:00
|
|
|
elif opts.scope == 'staging':
|
|
|
|
letters = api.get_staging_projects_short()
|
|
|
|
for letter in letters:
|
2018-07-05 10:48:50 +02:00
|
|
|
if opts.staging_project and letter != opts.staging_project:
|
|
|
|
continue
|
2017-12-21 00:32:20 -06:00
|
|
|
opts.project = api.prj_from_short(letter)
|
2018-07-02 20:36:50 +02:00
|
|
|
self.repos = self.tool.expand_repos(opts.project, main_repo)
|
2018-07-16 15:16:33 +02:00
|
|
|
self.update_and_solve_target_wrapper(api, target_project, target_config, main_repo, opts)
|
2018-04-11 23:51:49 -05:00
|
|
|
return self.error_occured
|
|
|
|
|
|
|
|
def update_and_solve_target_wrapper(self, *args, **kwargs):
|
|
|
|
try:
|
|
|
|
self.update_and_solve_target(*args, **kwargs)
|
|
|
|
except Exception as e:
|
|
|
|
# Print exception, but continue to prevent problems effecting one
|
|
|
|
# project from killing the whole process. The downside is that a common
|
|
|
|
# error will be duplicated for each project. Common exceptions could
|
|
|
|
# be excluded if a set list is determined, but that is likely not
|
|
|
|
# practical.
|
|
|
|
traceback.print_exc()
|
|
|
|
self.error_occured = True
|
2017-12-21 00:32:20 -06:00
|
|
|
|
2018-07-16 15:16:33 +02:00
|
|
|
def update_and_solve_target(self, api, target_project, target_config, main_repo, opts,
|
2018-02-05 19:54:43 -06:00
|
|
|
skip_release=False, drop_list=False):
|
2018-01-08 22:46:11 -06:00
|
|
|
print('[{}] {}/{}: update and solve'.format(opts.scope, opts.project, main_repo))
|
|
|
|
|
2017-12-21 00:32:20 -06:00
|
|
|
group = target_config.get('pkglistgen-group', '000package-groups')
|
|
|
|
product = target_config.get('pkglistgen-product', '000product')
|
|
|
|
release = target_config.get('pkglistgen-release', '000release-packages')
|
|
|
|
|
2018-10-01 20:57:43 +02:00
|
|
|
opts.filtered_architectures = []
|
|
|
|
# make sure we only calculate for architectures that actually exist in the repository
|
|
|
|
for arch in target_archs(api.apiurl, opts.project, main_repo):
|
|
|
|
if arch in self.options.architectures:
|
|
|
|
opts.filtered_architectures.append(arch)
|
|
|
|
|
2018-07-16 15:16:33 +02:00
|
|
|
url = api.makeurl(['source', opts.project])
|
2017-12-21 00:32:20 -06:00
|
|
|
packages = ET.parse(http_GET(url)).getroot()
|
|
|
|
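# If the product package was deleted in the project, revive it and stop here;
# the package lists are only regenerated again on the next run.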
if packages.find('entry[@name="{}"]'.format(product)) is None:
|
2018-01-08 22:45:28 -06:00
|
|
|
if not self.options.dry:
|
2018-07-16 15:16:33 +02:00
|
|
|
undelete_package(api.apiurl, opts.project, product, 'revive')
|
2017-12-21 00:32:20 -06:00
|
|
|
# TODO disable build.
|
|
|
|
print('{} undeleted, skip dvd until next cycle'.format(product))
|
|
|
|
return
|
|
|
|
elif not opts.force:
|
2018-07-16 15:16:33 +02:00
|
|
|
root = ET.fromstringlist(show_results_meta(api.apiurl, opts.project, product,
|
2017-12-21 00:32:20 -06:00
|
|
|
repository=[main_repo], multibuild=True))
|
|
|
|
if len(root.xpath('result[@state="building"]')) or len(root.xpath('result[@state="dirty"]')):
|
|
|
|
print('{}/{} build in progress'.format(opts.project, product))
|
|
|
|
return
|
|
|
|
|
|
|
|
checkout_list = [group, product]
|
|
|
|
if not skip_release:
|
|
|
|
checkout_list.append(release)
|
|
|
|
|
|
|
|
if packages.find('entry[@name="{}"]'.format(release)) is None:
|
2018-01-08 22:45:28 -06:00
|
|
|
if not self.options.dry:
|
2018-07-16 15:16:33 +02:00
|
|
|
undelete_package(api.apiurl, opts.project, release, 'revive')
|
2017-12-21 00:32:20 -06:00
|
|
|
print('{} undeleted, skip dvd until next cycle'.format(release))
|
|
|
|
return
|
|
|
|
|
|
|
|
# Cache dir specific to hostname and project.
|
2018-07-16 15:16:33 +02:00
|
|
|
host = urlparse.urlparse(api.apiurl).hostname
|
2018-09-04 15:03:03 -05:00
|
|
|
cache_dir = CacheManager.directory('pkglistgen', host, opts.project)
|
2017-12-21 00:32:20 -06:00
|
|
|
|
2018-07-03 04:17:54 +02:00
|
|
|
if not opts.no_checkout:
|
|
|
|
if os.path.exists(cache_dir):
|
|
|
|
shutil.rmtree(cache_dir)
|
|
|
|
os.makedirs(cache_dir)
|
2017-12-21 00:32:20 -06:00
|
|
|
|
|
|
|
group_dir = os.path.join(cache_dir, group)
|
|
|
|
product_dir = os.path.join(cache_dir, product)
|
|
|
|
release_dir = os.path.join(cache_dir, release)
|
|
|
|
|
|
|
|
for package in checkout_list:
|
2018-07-03 04:17:54 +02:00
|
|
|
if opts.no_checkout:
|
|
|
|
print("Skipping checkout of {}/{}".format(opts.project, package))
|
|
|
|
continue
|
2018-07-16 15:16:33 +02:00
|
|
|
checkout_package(api.apiurl, opts.project, package, expand_link=True, prj_dir=cache_dir)
|
2017-12-21 00:32:20 -06:00
|
|
|
|
|
|
|
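# Start from a clean checkout: drop previously generated files and rebuild the
# product package from the templates shipped in the group package.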
if not skip_release:
|
|
|
|
self.unlink_all_except(release_dir)
|
|
|
|
self.unlink_all_except(product_dir)
|
|
|
|
self.copy_directory_contents(group_dir, product_dir,
|
|
|
|
['supportstatus.txt', 'groups.yml', 'package-groups.changes'])
|
|
|
|
self.change_extension(product_dir, '.spec.in', '.spec')
|
2018-02-13 22:04:56 +01:00
|
|
|
self.change_extension(product_dir, '.product.in', '.product')
|
2017-12-21 00:32:20 -06:00
|
|
|
|
2018-01-08 22:43:51 -06:00
|
|
|
self.options.input_dir = group_dir
|
2017-12-21 00:32:20 -06:00
|
|
|
self.options.output_dir = product_dir
|
|
|
|
self.postoptparse()
|
|
|
|
|
2018-01-08 22:46:11 -06:00
|
|
|
print('-> do_update')
|
2017-12-21 00:32:20 -06:00
|
|
|
self.do_update('update', opts)
|
|
|
|
|
2018-02-05 19:54:43 -06:00
|
|
|
nonfree = target_config.get('nonfree')
|
2018-07-10 13:26:02 +02:00
|
|
|
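# When a nonfree counterpart project is configured and a drop list is
# requested, update its repository data as well so it can be merged in.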
if opts.scope not in ('arm', 'ports') and nonfree and drop_list:
|
2018-02-05 19:54:43 -06:00
|
|
|
print('-> do_update nonfree')
|
|
|
|
|
|
|
|
# Switch to nonfree repo (ugly, but that's how the code was set up).
|
2018-07-02 20:36:50 +02:00
|
|
|
repos_ = self.repos
|
2018-02-05 19:54:43 -06:00
|
|
|
opts_nonfree = copy.deepcopy(opts)
|
|
|
|
opts_nonfree.project = nonfree
|
2018-08-02 17:51:00 +08:00
|
|
|
self.repos = self.tool.expand_repos(nonfree, main_repo)
|
2018-02-05 19:54:43 -06:00
|
|
|
self.do_update('update', opts_nonfree)
|
|
|
|
|
|
|
|
# Switch repo back to main target project.
|
2018-07-02 20:36:50 +02:00
|
|
|
self.repos = repos_
|
2018-02-05 19:54:43 -06:00
|
|
|
|
2018-07-10 13:26:02 +02:00
|
|
|
print('-> update_merge')
|
|
|
|
self.update_merge(nonfree if drop_list else False)
|
2018-02-05 19:54:43 -06:00
|
|
|
|
2018-01-08 22:46:11 -06:00
|
|
|
print('-> do_solve')
|
2018-03-09 13:14:01 +01:00
|
|
|
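# Solver behaviour is driven by per-project configuration values.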
opts.ignore_unresolvable = str2bool(target_config.get('pkglistgen-ignore-unresolvable'))
|
|
|
|
opts.ignore_recommended = str2bool(target_config.get('pkglistgen-ignore-recommended'))
|
|
|
|
opts.include_suggested = str2bool(target_config.get('pkglistgen-include-suggested'))
|
2018-01-08 22:44:29 -06:00
|
|
|
opts.locale = target_config.get('pkglistgen-local')
|
2018-01-10 14:10:17 +01:00
|
|
|
opts.locales_from = target_config.get('pkglistgen-locales-from')
|
2018-07-16 15:16:33 +02:00
|
|
|
summary = self.do_solve('solve', opts)
|
|
|
|
|
2018-07-03 09:30:14 +02:00
|
|
|
if opts.stop_after_solve:
|
|
|
|
return
|
2017-12-21 00:32:20 -06:00
|
|
|
|
2018-02-05 19:54:43 -06:00
|
|
|
if drop_list:
|
|
|
|
# Ensure solv files from all releases in product family are updated.
|
|
|
|
print('-> solv_cache_update')
|
2018-09-04 15:03:03 -05:00
|
|
|
cache_dir_solv = CacheManager.directory('pkglistgen', 'solv')
|
2018-02-12 17:56:29 -06:00
|
|
|
family_last = target_config.get('pkglistgen-product-family-last')
|
2018-02-05 19:54:43 -06:00
|
|
|
family_include = target_config.get('pkglistgen-product-family-include')
|
|
|
|
solv_prior = self.solv_cache_update(
|
2018-07-16 15:16:33 +02:00
|
|
|
api.apiurl, cache_dir_solv, target_project, family_last, family_include, opts)
|
2018-02-05 19:54:43 -06:00
|
|
|
|
|
|
|
# Include pre-final release solv files for target project. These
|
|
|
|
# files will only exist from previous runs.
|
|
|
|
cache_dir_solv_current = os.path.join(cache_dir_solv, target_project)
|
|
|
|
solv_prior.update(glob.glob(os.path.join(cache_dir_solv_current, '*.merged.solv')))
|
|
|
|
for solv_file in solv_prior:
|
|
|
|
logger.debug(solv_file.replace(cache_dir_solv, ''))
|
|
|
|
|
|
|
|
print('-> do_create_droplist')
|
|
|
|
# Reset to product after solv_cache_update().
|
|
|
|
self.options.output_dir = product_dir
|
|
|
|
self.do_create_droplist('create_droplist', opts, *solv_prior)
|
|
|
|
|
2017-12-21 00:32:20 -06:00
|
|
|
delete_products = target_config.get('pkglistgen-delete-products', '').split(' ')
|
|
|
|
self.unlink_list(product_dir, delete_products)
|
|
|
|
|
2018-01-08 22:46:11 -06:00
|
|
|
print('-> product service')
|
2017-12-21 00:32:20 -06:00
|
|
|
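# Expand every generated .product file with the local OBS product converter
# service; the resulting kiwi/spec files are processed below.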
for product_file in glob.glob(os.path.join(product_dir, '*.product')):
|
|
|
|
print(subprocess.check_output(
|
|
|
|
[PRODUCT_SERVICE, product_file, product_dir, opts.project]))
|
|
|
|
|
2018-01-17 14:33:36 +01:00
|
|
|
delete_kiwis = target_config.get('pkglistgen-delete-kiwis-{}'.format(opts.scope), '').split(' ')
|
2017-12-21 00:32:20 -06:00
|
|
|
self.unlink_list(product_dir, delete_kiwis)
|
2018-02-13 22:04:56 +01:00
|
|
|
if opts.scope == 'staging':
|
|
|
|
self.strip_medium_from_staging(product_dir)
|
2017-12-21 00:32:20 -06:00
|
|
|
|
|
|
|
spec_files = glob.glob(os.path.join(product_dir, '*.spec'))
|
|
|
|
if skip_release:
|
|
|
|
self.unlink_list(None, spec_files)
|
|
|
|
else:
|
|
|
|
self.move_list(spec_files, release_dir)
|
2018-06-19 06:53:47 +02:00
|
|
|
inc_files = glob.glob(os.path.join(group_dir, '*.inc'))
|
|
|
|
self.move_list(inc_files, release_dir)
|
2017-12-21 00:32:20 -06:00
|
|
|
|
|
|
|
self.multibuild_from_glob(product_dir, '*.kiwi')
|
|
|
|
self.build_stub(product_dir, 'kiwi')
|
|
|
|
self.commit_package(product_dir)
|
|
|
|
|
|
|
|
if not skip_release:
|
|
|
|
self.multibuild_from_glob(release_dir, '*.spec')
|
|
|
|
self.build_stub(release_dir, 'spec')
|
|
|
|
self.commit_package(release_dir)
|
|
|
|
|
2018-07-16 15:16:33 +02:00
|
|
|
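# If the project carries a 000product-summary package, publish a YAML overview
# of the solved groups (plus the raw unsorted.yml) there.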
if api.item_exists(opts.project, '000product-summary'):
|
|
|
|
summary_str = "# Summary of packages in groups"
|
|
|
|
for group in sorted(summary.keys()):
|
2018-09-24 15:33:10 +02:00
|
|
|
# the unsorted group should appear filtered by unneeded.yml,
|
|
|
|
# so we publish the content of unsorted.yml rather than
|
|
|
|
# unsorted.group (this grew a little unnaturally)
|
2018-09-27 07:37:07 +02:00
|
|
|
if group == 'unsorted':
|
|
|
|
continue
|
2018-07-16 15:16:33 +02:00
|
|
|
summary_str += "\n" + group + ":\n"
|
|
|
|
for package in sorted(summary[group]):
|
|
|
|
summary_str += " - " + package + "\n"
|
|
|
|
|
2018-10-09 07:24:30 +02:00
|
|
|
source_file_ensure(api.apiurl, opts.project, '000product-summary',
|
|
|
|
'summary.yml', summary_str, 'Updating summary.yml')
|
2018-09-24 15:33:10 +02:00
|
|
|
unsorted_yml = open(os.path.join(product_dir, 'unsorted.yml')).read()
|
2018-10-09 07:24:30 +02:00
|
|
|
source_file_ensure(api.apiurl, opts.project, '000product-summary',
|
|
|
|
'unsorted.yml', unsorted_yml, 'Updating unsorted.yml')
|
2018-09-24 15:33:10 +02:00
|
|
|
|
2018-02-12 17:56:29 -06:00
|
|
|
def solv_cache_update(self, apiurl, cache_dir_solv, target_project, family_last, family_include, opts):
|
2018-02-05 19:54:43 -06:00
|
|
|
"""Dump solv files (do_dump_solv) for all products in family."""
|
|
|
|
prior = set()
|
|
|
|
|
2018-02-12 17:56:29 -06:00
|
|
|
project_family = project_list_family_prior(
|
|
|
|
apiurl, target_project, include_self=True, last=family_last)
|
2018-02-05 19:54:43 -06:00
|
|
|
if family_include:
|
|
|
|
# Include projects from a different family if desired.
|
|
|
|
project_family.extend(project_list_family(apiurl, family_include))
|
|
|
|
|
|
|
|
for project in project_family:
|
2018-08-16 21:46:05 -05:00
|
|
|
config = Config(apiurl, project)
|
2018-02-05 19:54:43 -06:00
|
|
|
project_config = conf.config[project]
|
|
|
|
|
|
|
|
baseurl = project_config.get('download-baseurl')
|
2018-02-15 14:41:26 -06:00
|
|
|
baseurl_update = project_config.get('download-baseurl-update')
|
2018-02-05 19:54:43 -06:00
|
|
|
if not baseurl:
|
|
|
|
logger.warning('no baseurl configured for {}'.format(project))
|
|
|
|
continue
|
|
|
|
|
|
|
|
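# Dump solv data for the release's oss repository and, when configured,
# its update and non-oss counterparts.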
urls = [urlparse.urljoin(baseurl, 'repo/oss/')]
|
2018-02-15 14:41:26 -06:00
|
|
|
if baseurl_update:
|
|
|
|
urls.append(urlparse.urljoin(baseurl_update, 'oss/'))
|
2018-02-05 19:54:43 -06:00
|
|
|
if project_config.get('nonfree'):
|
|
|
|
urls.append(urlparse.urljoin(baseurl, 'repo/non-oss/'))
|
2018-02-15 14:41:26 -06:00
|
|
|
if baseurl_update:
|
|
|
|
urls.append(urlparse.urljoin(baseurl_update, 'non-oss/'))
|
2018-02-05 19:54:43 -06:00
|
|
|
|
|
|
|
names = []
|
|
|
|
for url in urls:
|
2018-02-15 14:41:26 -06:00
|
|
|
project_display = project
|
|
|
|
if 'update' in url:
|
|
|
|
project_display += ':Update'
|
2018-02-05 19:54:43 -06:00
|
|
|
print('-> do_dump_solv for {}/{}'.format(
|
2018-02-15 14:41:26 -06:00
|
|
|
project_display, os.path.basename(os.path.normpath(url))))
|
2018-02-05 19:54:43 -06:00
|
|
|
logger.debug(url)
|
|
|
|
|
|
|
|
self.options.output_dir = os.path.join(cache_dir_solv, project)
|
|
|
|
if not os.path.exists(self.options.output_dir):
|
|
|
|
os.makedirs(self.options.output_dir)
|
|
|
|
|
|
|
|
opts.overwrite = False
|
2018-07-10 13:26:02 +02:00
|
|
|
solv_name = self.do_dump_solv('dump_solv', opts, url)
|
|
|
|
if solv_name:
|
|
|
|
names.append(solv_name)
|
2018-02-05 19:54:43 -06:00
|
|
|
|
|
|
|
if not len(names):
|
|
|
|
logger.warning('no solv files were dumped for {}'.format(project))
|
|
|
|
continue
|
|
|
|
|
|
|
|
# Merge nonfree solv with free solv or copy free solv as merged.
|
|
|
|
merged = names[0].replace('.solv', '.merged.solv')
|
2018-02-15 14:41:26 -06:00
|
|
|
if len(names) >= 2:
|
2018-02-15 09:29:19 -06:00
|
|
|
self.solv_merge(merged, *names)
|
2018-02-05 19:54:43 -06:00
|
|
|
else:
|
|
|
|
shutil.copyfile(names[0], merged)
|
|
|
|
prior.add(merged)
|
|
|
|
|
|
|
|
return prior
|
|
|
|
|
2018-02-13 22:04:56 +01:00
|
|
|
# staging projects don't need the source and debug media - and the glibc source
|
|
|
|
# rpm conflicts between the standard and bootstrap_copy repositories, causing the
|
|
|
|
# product builder to fail
|
|
|
|
def strip_medium_from_staging(self, path):
|
|
|
|
medium = re.compile('name="(DEBUG|SOURCE)MEDIUM"')
|
|
|
|
for name in glob.glob(os.path.join(path, '*.kiwi')):
|
|
|
|
lines = open(name).readlines()
|
|
|
|
lines = [l for l in lines if not medium.search(l)]
|
|
|
|
open(name, 'w').writelines(lines)
|
|
|
|
|
2017-12-21 00:32:20 -06:00
|
|
|
def move_list(self, file_list, destination):
|
|
|
|
for name in file_list:
|
|
|
|
os.rename(name, os.path.join(destination, os.path.basename(name)))
|
|
|
|
|
|
|
|
def unlink_list(self, path, names):
|
|
|
|
for name in names:
|
|
|
|
if path is None:
|
|
|
|
name_path = name
|
|
|
|
else:
|
|
|
|
name_path = os.path.join(path, name)
|
|
|
|
|
|
|
|
if os.path.isfile(name_path):
|
|
|
|
os.unlink(name_path)
|
|
|
|
|
|
|
|
def unlink_all_except(self, path, ignore_list=['_service'], ignore_hidden=True):
|
|
|
|
for name in os.listdir(path):
|
|
|
|
if name in ignore_list or (ignore_hidden and name.startswith('.')):
|
|
|
|
continue
|
|
|
|
|
|
|
|
name_path = os.path.join(path, name)
|
|
|
|
if os.path.isfile(name_path):
|
|
|
|
os.unlink(name_path)
|
|
|
|
|
|
|
|
def copy_directory_contents(self, source, destination, ignore_list=[]):
|
|
|
|
for name in os.listdir(source):
|
|
|
|
name_path = os.path.join(source, name)
|
|
|
|
if name in ignore_list or not os.path.isfile(name_path):
|
|
|
|
continue
|
|
|
|
|
|
|
|
shutil.copy(name_path, os.path.join(destination, name))
|
|
|
|
|
|
|
|
def change_extension(self, path, original, final):
|
|
|
|
for name in glob.glob(os.path.join(path, '*{}'.format(original))):
|
|
|
|
# Assumes the extension is only found at the end.
|
|
|
|
os.rename(name, name.replace(original, final))
|
|
|
|
|
|
|
|
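# Write a _multibuild file with one <package> flavor per file matching the
# glob (flavor name = file name without extension).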
def multibuild_from_glob(self, destination, pathname):
|
|
|
|
root = ET.Element('multibuild')
|
2018-02-13 22:04:56 +01:00
|
|
|
for name in sorted(glob.glob(os.path.join(destination, pathname))):
|
2017-12-21 00:32:20 -06:00
|
|
|
package = ET.SubElement(root, 'package')
|
|
|
|
package.text = os.path.splitext(os.path.basename(name))[0]
|
|
|
|
|
|
|
|
with open(os.path.join(destination, '_multibuild'), 'w+b') as f:
|
|
|
|
f.write(ET.tostring(root, pretty_print=True))
|
|
|
|
|
|
|
|
def build_stub(self, destination, extension):
|
|
|
|
f = open(os.path.join(destination, '.'.join(['stub', extension])), 'w+')
|
|
|
|
f.write('# prevent building single {} files twice\n'.format(extension))
|
|
|
|
f.write('Name: stub\n')
|
|
|
|
f.write('Version: 0.0\n')
|
|
|
|
f.close()
|
|
|
|
|
|
|
|
def commit_package(self, path):
|
|
|
|
package = Package(path)
|
|
|
|
if self.options.dry:
|
|
|
|
for i in package.get_diff():
|
|
|
|
print(''.join(i))
|
|
|
|
else:
|
|
|
|
# No proper API function to perform the same operation.
|
|
|
|
print(subprocess.check_output(
|
|
|
|
' '.join(['cd', path, '&&', 'osc', 'addremove']), shell=True))
|
2018-03-09 16:42:08 +08:00
|
|
|
package.commit(msg='Automatic update', skip_local_service_run=True)
|
2017-12-21 00:32:20 -06:00
|
|
|
|
2017-09-09 12:52:22 +02:00
|
|
|
|
2017-08-29 18:20:58 +02:00
|
|
|
if __name__ == "__main__":
|
|
|
|
app = CommandLineInterface()
|
2017-09-07 14:45:37 +02:00
|
|
|
sys.exit(app.main())
|