from __future__ import print_function

import glob
import logging
import os
import re
import shutil
import subprocess

import solv
import yaml

import ToolBase

from lxml import etree as ET
from osc import conf
from osc.core import checkout_package
from osc.core import http_GET, http_PUT
from osc.core import HTTPError
from osc.core import show_results_meta
from osc.core import undelete_package
from osc.core import Package
from osclib.conf import Config, str2bool
from osclib.core import target_archs
from osclib.core import repository_path_expand
from osclib.core import repository_arch_state
from osclib.core import source_file_ensure
from osclib.cache_manager import CacheManager

try:
    from urllib.parse import urljoin, urlparse
except ImportError:
    # python 2.x
    from urlparse import urljoin, urlparse

from pkglistgen import file_utils
from pkglistgen import solv_utils
from pkglistgen.group import Group, ARCHITECTURES

SCRIPT_PATH = os.path.dirname(os.path.realpath(__file__))

# share header cache with repochecker
CACHEDIR = CacheManager.directory('repository-meta')

PRODUCT_SERVICE = '/usr/lib/obs/service/create_single_product'


class PkgListGen(ToolBase.ToolBase):

    def __init__(self):
        ToolBase.ToolBase.__init__(self)
        # package -> supportstatus
        self.packages = dict()
        self.groups = dict()
        self._supportstatus = None
        self.input_dir = '.'
        self.output_dir = '.'
        self.lockjobs = dict()
        self.ignore_broken = False
        self.unwanted = set()
        self.output = None
        self.locales = set()
        self.did_update = False
        self.logger = logging.getLogger(__name__)
        self.filtered_architectures = None
        self.dry_run = False
        self.architectures = ARCHITECTURES

    def filter_architectures(self, architectures):
        self.filtered_architectures = list(set(architectures) & set(self.architectures))

    def _load_supportstatus(self):
        # XXX
        fn = os.path.join(self.input_dir, 'supportstatus.txt')
        self._supportstatus = dict()
        if os.path.exists(fn):
            with open(fn, 'r') as fh:
                for line in fh:
                    # one "<package> <status>" pair per line
                    a = line.rstrip().split(' ')
                    if len(a) > 1:
                        self._supportstatus[a[0]] = a[1]

    def supportstatus(self, package):
        if self._supportstatus is None:
            self._load_supportstatus()
        return self._supportstatus.get(package)

    def _load_group_file(self, fn):
        output = None
        unwanted = None
        with open(fn, 'r') as fh:
            self.logger.debug('reading %s', fn)
            for groupname, group in yaml.safe_load(fh).items():
                if groupname == 'OUTPUT':
                    output = group
                    continue
                if groupname == 'UNWANTED':
                    unwanted = set(group)
                    continue
                g = Group(groupname, self)
                g.parse_yml(group)
        return output, unwanted

    def load_all_groups(self):
        for fn in glob.glob(os.path.join(self.input_dir, 'group*.yml')):
            o, u = self._load_group_file(fn)
            if o:
                if self.output is not None:
                    raise Exception('OUTPUT defined multiple times')
                self.output = o
            if u:
                self.unwanted |= u

    # required to generate release spec files (only)
    def write_group_stubs(self):
        archs = ['*'] + self.architectures
        for name in self.groups:
            group = self.groups[name]
            group.solved_packages = dict()
            fn = '{}.group'.format(group.name)
            with open(os.path.join(self.output_dir, fn), 'w') as fh:
                for arch in archs:
                    x = group.toxml(arch, self.ignore_broken, None)
                    x = ET.tostring(x, pretty_print=True, encoding='unicode')
                    fh.write(x)

    def write_all_groups(self):
        self._check_supplements()
        summary = dict()
        archs = ['*'] + self.architectures
        for name in self.groups:
            group = self.groups[name]
            if not group.solved:
                continue
            summary[name] = group.summary()
            fn = '{}.group'.format(group.name)
            with open(os.path.join(self.output_dir, fn), 'w') as fh:
                comment = group.comment
                for arch in archs:
                    x = group.toxml(arch, self.ignore_broken, comment)
                    # only comment first time
                    comment = None
                    x = ET.tostring(x, pretty_print=True, encoding='unicode')
                    x = re.sub(r'\s*<!-- reason:', ' <!-- reason:', x)
                    fh.write(x)
        return summary
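    # Illustrative sketch of the group*.yml input consumed above (hypothetical
    # group and package names). OUTPUT and UNWANTED are the two special keys
    # recognized by _load_group_file(); every other top-level key becomes a
    # Group. solve_project() later reads the per-group settings (includes,
    # excludes, recommends, conflicts, default-support) from the OUTPUT list:
    #
    #   OUTPUT:
    #     - sle-module-basesystem:
    #         includes: [bootstrap]
    #         recommends: false
    #   UNWANTED:
    #     - obsolete-package
    #   bootstrap:
    #     - bash
    #     - coreutils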
    def expand_repos(self, project, repo='standard'):
        return repository_path_expand(self.apiurl, project, repo)

    def create_droplist(self, output_dir, oldsolv):
        # drops maps a dropped binary package to the repo that last shipped
        # it; it is computed by diffing the current pool against each of the
        # older solv files in oldsolv.
        drops = dict()
        # ...
        name = os.path.join(output_dir, 'obsoletepackages.inc')
        with open(name, 'w') as ofh:
            for reponame in sorted(set(drops.values())):
                print('<!-- %s -->' % reponame, file=ofh)
                for p in sorted(drops):
                    if drops[p] != reponame:
                        continue
                    print('  <obsoletepackage>%s</obsoletepackage>' % p, file=ofh)

    def solve_project(self, ignore_unresolvable=False, ignore_recommended=False, locale=None, locales_from=None):
        """
        Generates solv from pre-published repository contained in local cache.
        Use dump_solv to extract solv from published repository.
        """
        self.load_all_groups()
        if not self.output:
            self.logger.error('OUTPUT not defined')
            return

        if ignore_unresolvable:
            self.ignore_broken = True
        global_use_recommends = not ignore_recommended
        if locale:
            for loc in locale:
                self.locales |= set(loc.split(','))
        if locales_from:
            with open(os.path.join(self.input_dir, locales_from), 'r') as fh:
                root = ET.parse(fh).getroot()
                self.locales |= set([lang.text for lang in root.findall('.//linguas/language')])

        modules = []
        # the yaml parser wraps everything in lists, so we may iterate over
        # more entries than we actually support
        for group in self.output:
            groupname = list(group.keys())[0]
            settings = group[groupname]
            if not settings:  # e.g. unsorted
                settings = {}
            includes = settings.get('includes', [])
            excludes = settings.get('excludes', [])
            use_recommends = settings.get('recommends', global_use_recommends)
            self.solve_module(groupname, includes, excludes, use_recommends)
            g = self.groups[groupname]
            g.conflicts = settings.get('conflicts', [])
            g.default_support_status = settings.get('default-support', 'unsupported')
            modules.append(g)

        # not defined for openSUSE
        overlap = self.groups.get('overlap')
        for module in modules:
            module.check_dups(modules, overlap)
            module.collect_devel_packages()
            module.filter_already_selected(modules)

        if overlap:
            ignores = [x.name for x in overlap.ignored]
            self.solve_module(overlap.name, [], ignores, use_recommends=False)
            overlapped = set(overlap.solved_packages['*'])
            for arch in self.filtered_architectures:
                overlapped |= set(overlap.solved_packages[arch])
            for module in modules:
                if module.name == 'overlap' or module in overlap.ignored:
                    continue
                for arch in ['*'] + self.filtered_architectures:
                    for p in overlapped:
                        module.solved_packages[arch].pop(p, None)

        self._collect_unsorted_packages(modules, self.groups.get('unsorted'))
        return self.write_all_groups()

    # staging projects don't need source and debug media - and the glibc
    # source rpm conflicts between the standard and bootstrap_copy
    # repositories, causing the product builder to fail
    def strip_medium_from_staging(self, path):
        medium = re.compile('name="(DEBUG|SOURCE)MEDIUM"')
        for name in glob.glob(os.path.join(path, '*.kiwi')):
            with open(name, 'r') as fh:
                lines = [l for l in fh.readlines() if not medium.search(l)]
            with open(name, 'w') as fh:
                fh.writelines(lines)

    def build_stub(self, destination, extension):
        with open(os.path.join(destination, '.'.join(['stub', extension])), 'w+') as f:
            f.write('# prevent building single {} files twice\n'.format(extension))
            f.write('Name: stub\n')
            f.write('Version: 0.0\n')
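    # For reference, build_stub(dir, 'spec') leaves a stub.spec behind that
    # looks like this (same shape for 'kiwi'):
    #
    #   # prevent building single spec files twice
    #   Name: stub
    #   Version: 0.0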
    def commit_package(self, path):
        if self.dry_run:
            package = Package(path)
            for i in package.get_diff():
                print(''.join(i))
        else:
            # No proper API function to perform the same operation,
            # so shell out to osc.
            print(subprocess.check_output(['osc', 'addremove'], cwd=path))
            package = Package(path)
            package.commit(msg='Automatic update', skip_local_service_run=True)

    def update_and_solve_target(self, api, target_project, target_config, main_repo,
                                project, scope, force, no_checkout,
                                only_release_packages, stop_after_solve, drop_list=False):
        self.repos = self.expand_repos(project, main_repo)
        print('[{}] {}/{}: update and solve'.format(scope, project, main_repo))

        group = target_config.get('pkglistgen-group', '000package-groups')
        product = target_config.get('pkglistgen-product', '000product')
        release = target_config.get('pkglistgen-release', '000release-packages')

        url = api.makeurl(['source', project])
        packages = ET.parse(http_GET(url)).getroot()
        if packages.find('entry[@name="{}"]'.format(product)) is None:
            if not self.dry_run:
                undelete_package(api.apiurl, project, product, 'revive')
            # TODO disable build.
            print('{} undeleted, skip dvd until next cycle'.format(product))
            return
        elif not force:
            root = ET.fromstringlist(show_results_meta(api.apiurl, project, product,
                                                       repository=[main_repo], multibuild=True))
            if len(root.xpath('result[@state="building"]')) or len(root.xpath('result[@state="dirty"]')):
                print('{}/{} build in progress'.format(project, product))
                return

        checkout_list = [group, product, release]
        if packages.find('entry[@name="{}"]'.format(release)) is None:
            if not self.dry_run:
                undelete_package(api.apiurl, project, release, 'revive')
            print('{} undeleted, skip dvd until next cycle'.format(release))
            return

        # Cache dir specific to hostname and project.
        host = urlparse(api.apiurl).hostname
        cache_dir = CacheManager.directory('pkglistgen', host, project)

        if not no_checkout:
            if os.path.exists(cache_dir):
                shutil.rmtree(cache_dir)
            os.makedirs(cache_dir)

        group_dir = os.path.join(cache_dir, group)
        product_dir = os.path.join(cache_dir, product)
        release_dir = os.path.join(cache_dir, release)

        for package in checkout_list:
            if no_checkout:
                print('Skipping checkout of {}/{}'.format(project, package))
                continue
            checkout_package(api.apiurl, project, package, expand_link=True, prj_dir=cache_dir)

        file_utils.unlink_all_except(release_dir)
        if not only_release_packages:
            file_utils.unlink_all_except(product_dir)
        file_utils.copy_directory_contents(group_dir, product_dir,
                                           ['supportstatus.txt', 'groups.yml', 'package-groups.changes'])
        file_utils.change_extension(product_dir, '.spec.in', '.spec')
        file_utils.change_extension(product_dir, '.product.in', '.product')

        self.input_dir = group_dir
        self.output_dir = product_dir

        print('-> do_update')
        # make sure we only calculate existing architectures
        self.filter_architectures(target_archs(api.apiurl, project, main_repo))
        self.update_repos(self.filtered_architectures)

        nonfree = target_config.get('nonfree')
        if nonfree and drop_list:
            print('-> do_update nonfree')

            # Switch to nonfree repo (ugly, but that's how the code was set up).
            repos_ = self.repos
            self.repos = self.expand_repos(nonfree, main_repo)
            self.update_repos(self.filtered_architectures)

            # Switch repo back to main target project.
            self.repos = repos_
        print('-> update_merge')
        solv_utils.update_merge(nonfree if drop_list else False, self.repos, self.architectures)

        if only_release_packages:
            self.load_all_groups()
            self.write_group_stubs()
        else:
            summary = self.solve_project(ignore_unresolvable=str2bool(target_config.get('pkglistgen-ignore-unresolvable')),
                                         ignore_recommended=str2bool(target_config.get('pkglistgen-ignore-recommended')),
                                         locale=target_config.get('pkglistgen-locale'),
                                         locales_from=target_config.get('pkglistgen-locales-from'))

        if stop_after_solve:
            return

        if drop_list:
            # Ensure solv files from all releases in product family are updated.
            print('-> solv_cache_update')
            cache_dir_solv = CacheManager.directory('pkglistgen', 'solv')
            family_last = target_config.get('pkglistgen-product-family-last')
            family_include = target_config.get('pkglistgen-product-family-include')
            solv_prior = solv_utils.solv_cache_update(api.apiurl, cache_dir_solv, target_project,
                                                      family_last, family_include)

            # Include pre-final release solv files for target project. These
            # files will only exist from previous runs.
            cache_dir_solv_current = os.path.join(cache_dir_solv, target_project)
            solv_prior.update(glob.glob(os.path.join(cache_dir_solv_current, '*.merged.solv')))
            for solv_file in solv_prior:
                self.logger.debug(solv_file.replace(cache_dir_solv, ''))

            print('-> do_create_droplist')
            # Write the droplist to the product directory after solv_cache_update().
            self.create_droplist(product_dir, solv_prior)

        delete_products = target_config.get('pkglistgen-delete-products', '').split(' ')
        file_utils.unlink_list(product_dir, delete_products)

        print('-> product service')
        for product_file in glob.glob(os.path.join(product_dir, '*.product')):
            print(subprocess.check_output(
                [PRODUCT_SERVICE, product_file, product_dir, project]))

        for delete_kiwi in target_config.get('pkglistgen-delete-kiwis-{}'.format(scope), '').split(' '):
            delete_kiwis = glob.glob(os.path.join(product_dir, delete_kiwi))
            file_utils.unlink_list(product_dir, delete_kiwis)
        if scope == 'staging':
            self.strip_medium_from_staging(product_dir)

        spec_files = glob.glob(os.path.join(product_dir, '*.spec'))
        file_utils.move_list(spec_files, release_dir)
        inc_files = glob.glob(os.path.join(group_dir, '*.inc'))
        file_utils.move_list(inc_files, release_dir)

        file_utils.multibuild_from_glob(release_dir, '*.spec')
        self.build_stub(release_dir, 'spec')
        self.commit_package(release_dir)

        if only_release_packages:
            return

        file_utils.multibuild_from_glob(product_dir, '*.kiwi')
        self.build_stub(product_dir, 'kiwi')
        self.commit_package(product_dir)

        if api.item_exists(project, '000product-summary'):
            summary_str = '# Summary of packages in groups'
            for group in sorted(summary.keys()):
                # the unsorted group should appear filtered by
                # unneeded.yml - so we need the content of unsorted.yml,
                # not unsorted.group (this grew a little unnaturally)
                if group == 'unsorted':
                    continue
                summary_str += '\n' + group + ':\n'
                for package in sorted(summary[group]):
                    summary_str += '  - ' + package + '\n'
            source_file_ensure(api.apiurl, project, '000product-summary',
                               'summary.yml', summary_str, 'Updating summary.yml')

            with open(os.path.join(product_dir, 'unsorted.yml')) as fh:
                unsorted_yml = fh.read()
            source_file_ensure(api.apiurl, project, '000product-summary',
                               'unsorted.yml', unsorted_yml, 'Updating unsorted.yml')
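# Minimal usage sketch (hypothetical paths; assumes a working osc
# configuration, populated repository caches, and a checkout containing
# group*.yml plus supportstatus.txt as read above):
#
#   tool = PkgListGen()
#   tool.input_dir = '000package-groups'
#   tool.output_dir = '000product'
#   tool.filter_architectures(['x86_64'])
#   summary = tool.solve_project(ignore_unresolvable=True)
#
# solve_project() loads the groups, solves each module listed under OUTPUT
# and writes one <name>.group XML file per group into output_dir, returning
# the per-group summary dict produced by write_all_groups().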