#!/usr/bin/python

from xml.etree import cElementTree as ET
import sys
import cmdln
import logging

try:
    from urllib.error import HTTPError
except ImportError:
    # python 2.x
    from urllib2 import HTTPError

import osc.core

import ToolBase

logger = logging.getLogger()

FACTORY = "openSUSE:Factory"


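# Tool to maintain the i586 ("biarch") build flags of a project such as
# openSUSE:Factory: packages in the rings, on the whitelist, shipping a
# baselibs.conf, or needed to build such a package stay enabled for i586;
# everything else gets an explicit disable for that architecture.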
class BiArchTool(ToolBase.ToolBase):

    def __init__(self, project):
        ToolBase.ToolBase.__init__(self)
        self.project = project
        self.biarch_packages = None
        self._has_baselibs = dict()
        self.packages = []
        self.arch = 'i586'
        self.rdeps = None
        self.package_metas = dict()
        self.whitelist = {
            'i586': set([
                'bzr',
                'git',
                # _link to baselibs package
                'libjpeg62-turbo',
                'mercurial',
                'subversion',
                'ovmf',
            ]),
        }
        self.blacklist = {
            'i586': set([
                'belle-sip',
                'release-notes-openSUSE',
                'openSUSE-EULAs',  # translate-toolkit
                'skelcd-openSUSE',
                'plasma5-workspace',
                'patterns-base',
                'patterns-fonts',
                'patterns-rpm-macros',
                'patterns-yast',
                '000release-packages',
            ]),
        }

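    # list the file names of a source package; with expand=True links are
    # expanded first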
    def get_filelist(self, project, package, expand=False):
        query = {}
        if expand:
            query['expand'] = 1
        root = ET.fromstring(self.cached_GET(self.makeurl(['source', project, package], query)))
        return [node.get('name') for node in root.findall('entry')]

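    # check whether a package ships a baselibs.conf, also following _link;
    # results are cached in self._has_baselibs. Multibuild flavors that
    # carry a baselibs.conf can't be handled and are reported as False.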
    def has_baselibs(self, package):
        if package in self._has_baselibs:
            return self._has_baselibs[package]

        is_multibuild = False
        srcpkgname = package
        if ':' in package:
            is_multibuild = True
            srcpkgname = package.split(':')[0]

        ret = False
        files = self.get_filelist(self.project, srcpkgname)
        if 'baselibs.conf' in files:
            logger.debug('%s has baselibs', package)
            if is_multibuild:
                logger.warning('%s is multibuild and has baselibs. cannot handle that!', package)
            else:
                ret = True
        elif '_link' in files:
            files = self.get_filelist(self.project, srcpkgname, expand=True)
            if 'baselibs.conf' in files:
                logger.warning('%s is linked to a baselibs package', package)
        elif is_multibuild:
            logger.warning('%s is multibuild', package)
        self._has_baselibs[package] = ret
        return ret

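    # a package stays biarch if it is a ring package, whitelisted, ships
    # baselibs, or is a build dependency of another biarch package (walked
    # via the reverse builddepinfo); blacklisted packages never are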
    def is_biarch_recursive(self, package):
        logger.debug(package)
        if package in self.blacklist[self.arch]:
            logger.debug('%s is blacklisted', package)
            return False
        if package in self.biarch_packages:
            logger.debug('%s is known biarch package', package)
            return True
        if package in self.whitelist[self.arch]:
            logger.debug('%s is whitelisted', package)
            return True
        r = self.has_baselibs(package)
        if r:
            return r
        if package in self.rdeps:
            for p in self.rdeps[package]:
                r = self.is_biarch_recursive(p)
                if r:
                    break
        return r

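    # the set of known biarch packages is everything in the Rings:0-Bootstrap
    # and Rings:1-MinimalX subprojects (empty when operating on a ring
    # project itself)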
    def _init_biarch_packages(self):
        if self.biarch_packages is None:
            if ':Rings' in self.project:
                self.biarch_packages = set()
            else:
                self.biarch_packages = set(self.meta_get_packagelist("%s:Rings:0-Bootstrap" % self.project))
                self.biarch_packages |= set(self.meta_get_packagelist("%s:Rings:1-MinimalX" % self.project))

        self._init_rdeps()
        self.fill_package_meta()

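    # fetch the _meta of all packages in the project with a single search
    # request and keep the parsed XML around for later modification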
    def fill_package_meta(self):
        url = self.makeurl(['search', 'package'], "match=[@project='%s']" % self.project)
        root = ET.fromstring(self.cached_GET(url))
        for p in root.findall('package'):
            name = p.attrib['name']
            self.package_metas[name] = p

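    # build the reverse build dependency map for self.arch from the
    # project's _builddepinfo (view=revpkgnames)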
    def _init_rdeps(self):
        if self.rdeps is not None:
            return
        self.rdeps = dict()
        url = self.makeurl(['build', self.project, 'standard', self.arch, '_builddepinfo'], {'view': 'revpkgnames'})
        x = ET.fromstring(self.cached_GET(url))
        for pnode in x.findall('package'):
            name = pnode.get('name')
            for depnode in pnode.findall('pkgdep'):
                depname = depnode.text
                if depname == name:
                    logger.warning('%s requires itself for build', name)
                    continue
                self.rdeps.setdefault(name, set()).add(depname)

    def select_packages(self, packages):
        if packages == '__all__':
            self.packages = self.meta_get_packagelist(self.project)
        elif packages == '__latest__':
            # only works when called in packagelists loop
            # self.packages = self._filter_packages_by_time(self.latest_packages(self.project))
            self.packages = self.latest_packages(self.project)
        else:
            self.packages = packages

    # check when 000product was last changed, e.g. by the packagelist
    # generator, and yield only packages that got checked in after that
    # point in time.
    def _filter_packages_by_time(self, packages):
        x = ET.fromstring(self.cached_GET(self.makeurl(['source', self.project, '000product', '_history'], {'limit': '1'})))
        producttime = int(x.find('./revision/time').text)
        for pkg in packages:
            try:
                x = ET.fromstring(self.cached_GET(self.makeurl(['source', self.project, pkg, '_history'], {'rev': '1'})))
            # catch deleted packages
            except HTTPError as e:
                if e.code == 404:
                    continue
                raise e

            packagetime = int(x.find('./revision/time').text)
            # if producttime > packagetime:
            #     continue
            yield pkg

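    # drop explicit <enable/> flags for self.arch from the _meta of all
    # packages whose build for that arch is not disabled or excluded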
    def remove_explicit_enable(self):

        self._init_biarch_packages()

        resulturl = self.makeurl(['build', self.project, '_result'])
        result = ET.fromstring(self.cached_GET(resulturl))

        packages = set()

        for n in result.findall("./result[@arch='{}']/status".format(self.arch)):
            if n.get('code') not in ('disabled', 'excluded'):
                packages.add(n.get('package'))

        for pkg in sorted(packages):
            changed = False

            logger.debug("processing %s", pkg)
            if pkg not in self.package_metas:
                logger.error("%s not found", pkg)
                continue
            pkgmeta = self.package_metas[pkg]

            for build in pkgmeta.findall("./build"):
                for n in build.findall("./enable[@arch='{}']".format(self.arch)):
                    logger.debug("disable %s", pkg)
                    build.remove(n)
                    changed = True

            if changed:
                try:
                    pkgmetaurl = self.makeurl(['source', self.project, pkg, '_meta'])
                    self.http_PUT(pkgmetaurl, data=ET.tostring(pkgmeta))
                    if self.caching:
                        self._invalidate__cached_GET(pkgmetaurl)
                except HTTPError as e:
                    logger.error('failed to update %s: %s', pkg, e)

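    # add an explicit <disable/> for self.arch to selected packages that
    # have no <build/> section in their _meta yet, optionally wiping the
    # existing binaries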
    def add_explicit_disable(self, wipebinaries=False):

        self._init_biarch_packages()

        resulturl = self.makeurl(['source', self.project])
        result = ET.fromstring(self.cached_GET(resulturl))

        for pkg in self.packages:

            changed = False

            logger.debug("processing %s", pkg)
            if pkg not in self.package_metas:
                logger.error("%s not found", pkg)
                continue
            pkgmeta = self.package_metas[pkg]

            build = pkgmeta.findall("./build")
            if not build:
                logger.debug('disable %s for %s', pkg, self.arch)
                bn = pkgmeta.find('build')
                if bn is None:
                    bn = ET.SubElement(pkgmeta, 'build')
                ET.SubElement(bn, 'disable', {'arch': self.arch})
                changed = True

            if changed:
                try:
                    pkgmetaurl = self.makeurl(['source', self.project, pkg, '_meta'])
                    self.http_PUT(pkgmetaurl, data=ET.tostring(pkgmeta))
                    if self.caching:
                        self._invalidate__cached_GET(pkgmetaurl)
                    if wipebinaries:
                        self.http_POST(self.makeurl(['build', self.project], {
                            'cmd': 'wipe',
                            'arch': self.arch,
                            'package': pkg}))
                except HTTPError as e:
                    logger.error('failed to update %s: %s', pkg, e)

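    # main worker: decide for every selected package whether it has to stay
    # enabled for self.arch (ring, whitelist, baselibs or reverse build
    # dependency thereof) or has to be disabled, adjust the _meta
    # accordingly, drop redundant explicit enables and optionally wipe the
    # binaries of freshly disabled packages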
    def enable_baselibs_packages(self, force=False, wipebinaries=False):
        self._init_biarch_packages()
        todo = dict()
        for pkg in self.packages:
            logger.debug("processing %s", pkg)
            if pkg not in self.package_metas:
                logger.error("%s not found", pkg)
                continue
            pkgmeta = self.package_metas[pkg]

            is_enabled = None
            is_disabled = None
            has_baselibs = None
            must_disable = None
            changed = None

            for n in pkgmeta.findall("./build/enable[@arch='{}']".format(self.arch)):
                is_enabled = True
            for n in pkgmeta.findall("./build/disable[@arch='{}']".format(self.arch)):
                is_disabled = True

            if force:
                must_disable = False

            if must_disable is None:
                if self.is_biarch_recursive(pkg):
                    must_disable = False
                else:
                    must_disable = True

            if must_disable == False:
                if is_disabled:
                    logger.info('enabling %s for %s', pkg, self.arch)
                    for build in pkgmeta.findall("./build"):
                        for n in build.findall("./disable[@arch='{}']".format(self.arch)):
                            build.remove(n)
                            changed = True
                    if changed == False:
                        logger.error('build tag not found in %s/%s!?', pkg, self.arch)
                else:
                    logger.debug('%s already enabled for %s', pkg, self.arch)
            elif must_disable == True:
                if not is_disabled:
                    logger.info('disabling %s for %s', pkg, self.arch)
                    bn = pkgmeta.find('build')
                    if bn is None:
                        bn = ET.SubElement(pkgmeta, 'build')
                    ET.SubElement(bn, 'disable', {'arch': self.arch})
                    changed = True
                else:
                    logger.debug('%s already disabled for %s', pkg, self.arch)

            if is_enabled:
                logger.info('removing explicit enable %s for %s', pkg, self.arch)
                for build in pkgmeta.findall("./build"):
                    for n in build.findall("./enable[@arch='{}']".format(self.arch)):
                        build.remove(n)
                        changed = True
                if changed == False:
                    logger.error('build tag not found in %s/%s!?', pkg, self.arch)

            if changed:
                todo[pkg] = pkgmeta

        if todo:
            logger.info("applying changes")
            for pkg in sorted(todo.keys()):
                pkgmeta = todo[pkg]
                try:
                    pkgmetaurl = self.makeurl(['source', self.project, pkg, '_meta'])
                    self.http_PUT(pkgmetaurl, data=ET.tostring(pkgmeta))
                    if self.caching:
                        self._invalidate__cached_GET(pkgmetaurl)

                    if wipebinaries and pkgmeta.find("./build/disable[@arch='{}']".format(self.arch)) is not None:
                        logger.debug("wiping %s", pkg)
                        self.http_POST(self.makeurl(['build', self.project], {
                            'cmd': 'wipe',
                            'arch': self.arch,
                            'package': pkg}))
                except HTTPError as e:
                    logger.error('failed to update %s: %s', pkg, e)


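# cmdln based command line frontend; subcommands are implemented as the
# do_* methods below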
class CommandLineInterface(ToolBase.CommandLineInterface):

    def __init__(self, *args, **kwargs):
        ToolBase.CommandLineInterface.__init__(self, *args, **kwargs)

    def get_optparser(self):
        parser = ToolBase.CommandLineInterface.get_optparser(self)
        parser.add_option('-p', '--project', dest='project', metavar='PROJECT',
                          help='project to process (default: %s)' % FACTORY,
                          default=FACTORY)
        return parser

    def setup_tool(self):
        tool = BiArchTool(self.options.project)
        return tool

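    # translate the --all/package arguments of the subcommands into a
    # package selection on the tool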
    def _select_packages(self, all, packages):
        if packages:
            self.tool.select_packages(packages)
        elif all:
            self.tool.select_packages('__all__')
        else:
            self.tool.select_packages('__latest__')

    @cmdln.option('-n', '--interval', metavar="minutes", type="int", help="periodic interval in minutes")
    @cmdln.option('-a', '--all', action='store_true', help='process all packages')
    @cmdln.option('-f', '--force', action='store_true', help='enable in any case')
    @cmdln.option('--wipe', action='store_true', help='also wipe binaries')
    def do_enable_baselibs_packages(self, subcmd, opts, *packages):
        """${cmd_name}: enable build for packages in Ring 0 or 1 or with
        baselibs.conf

        ${cmd_usage}
        ${cmd_option_list}
        """
        def work():
            self._select_packages(opts.all, packages)
            self.tool.enable_baselibs_packages(force=opts.force, wipebinaries=opts.wipe)

        self.runner(work, opts.interval)

    @cmdln.option('-a', '--all', action='store_true', help='process all packages')
    def do_remove_explicit_enable(self, subcmd, opts, *packages):
        """${cmd_name}: remove all explicit enable tags from packages

        ${cmd_usage}
        ${cmd_option_list}
        """

        self.tool.remove_explicit_enable()

    @cmdln.option('-a', '--all', action='store_true', help='process all packages')
    @cmdln.option('-n', '--interval', metavar="minutes", type="int", help="periodic interval in minutes")
    @cmdln.option('--wipe', action='store_true', help='also wipe binaries')
    def do_add_explicit_disable(self, subcmd, opts, *packages):
        """${cmd_name}: add explicit disable to all packages

        ${cmd_usage}
        ${cmd_option_list}
        """

        def work():
            self._select_packages(opts.all, packages)
            self.tool.add_explicit_disable(wipebinaries=opts.wipe)

        self.runner(work, opts.interval)


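# Typical invocation, as a sketch only (script name and exact subcommand
# spelling are assumptions; cmdln derives the subcommands from the do_*
# methods above, so check --help for the authoritative list):
#
#   ./biarchtool.py -p openSUSE:Factory enable_baselibs_packages --wipe
#   ./biarchtool.py -p openSUSE:Factory add_explicit_disable --all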
if __name__ == "__main__":
    app = CommandLineInterface()
    sys.exit(app.main())