import ToolBase
import glob
import logging
import os
import re
import solv
import shutil
import subprocess
import yaml

from lxml import etree as ET

from osc.core import checkout_package
from osc.core import http_GET
from osc.core import show_results_meta
from osc.core import Package
from osc.core import undelete_package
from osclib.core import attribute_value_load
from osclib.core import target_archs
from osclib.conf import str2bool
from osclib.core import repository_path_expand
from osclib.core import repository_arch_state
from osclib.cache_manager import CacheManager
from osclib.pkglistgen_comments import PkglistComments

from urllib.parse import urlparse

from pkglistgen import file_utils
from pkglistgen.group import Group

SCRIPT_PATH = os.path.dirname(os.path.realpath(__file__))

PRODUCT_SERVICE = '/usr/lib/obs/service/create_single_product'

# share header cache with repochecker
CACHEDIR = CacheManager.directory('repository-meta')


class MismatchedRepoException(Exception):
    """raised on repos that restarted building"""


class PkgListGen(ToolBase.ToolBase):

    def __init__(self):
        ToolBase.ToolBase.__init__(self)
        self.logger = logging.getLogger(__name__)
        self.comment = PkglistComments(self.apiurl)
        self.reset()

    def reset(self):
        # package -> supportstatus
        self.packages = dict()
        self.groups = dict()
        self._supportstatus = None
        self.input_dir = '.'
        self.output_dir = '.'
        self.lockjobs = dict()
        self.ignore_broken = False
        self.unwanted = set()
        self.output = None
        self.locales = set()
        self.filtered_architectures = None
        self.dry_run = False
        self.all_architectures = None

    def filter_architectures(self, architectures):
        self.filtered_architectures = sorted(list(set(architectures) & set(self.all_architectures)))

    def _load_supportstatus(self):
        # XXX
        fn = os.path.join(self.input_dir, 'supportstatus.txt')
        self._supportstatus = dict()
        if os.path.exists(fn):
            with open(fn, 'r') as fh:
                for line in fh:
                    # pkg, status
                    fields = line.rstrip().split(' ')
                    if len(fields) > 1:
                        self._supportstatus[fields[0]] = fields[1]

    def supportstatus(self, package):
        if self._supportstatus is None:
            self._load_supportstatus()
        return self._supportstatus.get(package)

    def _load_group_file(self, fn):
        output = None
        unwanted = None
        with open(fn, 'r') as fh:
            self.logger.debug('reading %s', fn)
            for groupname, group in yaml.safe_load(fh).items():
                # OUTPUT and UNWANTED are special keys, not group definitions
                if groupname == 'OUTPUT':
                    output = group
                    continue
                if groupname == 'UNWANTED':
                    unwanted = set(group)
                    continue
                g = Group(groupname, self)
                g.parse_yml(group)
        return output, unwanted

    def group_input_files(self):
        return glob.glob(os.path.join(self.input_dir, 'group*.yml'))

    def load_all_groups(self):
        for fn in self.group_input_files():
            o, u = self._load_group_file(fn)
            if o:
                if self.output is not None:
                    raise Exception('OUTPUT defined multiple times')
                self.output = o
            if u:
                self.unwanted |= u

    # required to generate release spec files (only)
    def write_group_stubs(self):
        archs = ['*'] + self.all_architectures
        for name in self.groups:
            group = self.groups[name]
            group.solved_packages = dict()
            fn = '{}.group'.format(group.name)
            with open(os.path.join(self.output_dir, fn), 'w') as fh:
                for arch in archs:
                    x = group.toxml(arch, group.ignore_broken, None)
                    x = ET.tostring(x, pretty_print=True, encoding='unicode')
                    fh.write(x)

    def write_all_groups(self):
        self._check_supplements()
        summary = dict()
        archs = ['*'] + self.all_architectures
        for name in self.groups:
            group = self.groups[name]
            if not group.solved:
                continue
            summary[name] = group.summary()
            fn = '{}.group'.format(group.name)
            with open(os.path.join(self.output_dir, fn), 'w') as fh:
                comment = group.comment
                for arch in archs:
                    x = group.toxml(arch, group.ignore_broken, comment)
                    # only comment first time
                    comment = None
                    x = ET.tostring(x, pretty_print=True, encoding='unicode')
                    x = re.sub(r'\s*