import re
import urllib2
import warnings
from xml.etree import cElementTree as ET

from osc.core import change_request_state, show_package_meta, wipebinaries
from osc.core import http_GET, http_PUT, http_DELETE, http_POST
from osc.core import delete_package, search, set_devel_project
from datetime import date


class AcceptCommand(object):
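    """Commands for accepting staging projects into the target project
    and performing the related cleanup.
    """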

    def __init__(self, api):
        self.api = api

    def find_new_requests(self, project):
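        """Return the list of requests in state 'new' that target project.

        Each entry is a dict with the request id, the list of target
        packages and the action type ('submit', 'delete', ...).
        """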
        query = "match=state/@name='new'+and+action/target/@project='{}'".format(project)
        url = self.api.makeurl(['search', 'request'], query)

        f = http_GET(url)
        root = ET.parse(f).getroot()

        rqs = []
        for rq in root.findall('request'):
            pkgs = []
            act_type = None
            actions = rq.findall('action')
            for action in actions:
                act_type = action.get('type')
                targets = action.findall('target')
                for t in targets:
                    pkgs.append(str(t.get('package')))

            rqs.append({'id': int(rq.get('id')), 'packages': pkgs, 'type': act_type})
        return rqs

    def virtual_accept_request_has_no_binary(self, project, package):
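        """Check that no binaries are left in project for package or any
        of its multibuild flavors; only then can a virtually accepted
        delete request be finalized.
        """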
        filelist = self.api.get_filelist_for_package(pkgname=package, project=self.api.project, expand='1', extension='spec')
        pkgs = self.api.extract_specfile_short(filelist)

        for pkg in pkgs:
            query = {'view': 'binarylist', 'package': pkg, 'multibuild': '1'}
            pkg_binarylist = ET.parse(http_GET(self.api.makeurl(['build', project, '_result'], query=query))).getroot()
            if pkg_binarylist.findall('./result/binarylist/binary'):
                return False

        return True

    def find_virtually_accepted_requests(self, project):
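        """Return the delete requests targeting project that are still in
        review, waiting for the delreq-review group to sign off.
        """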
        query = "match=state/@name='review'+and+(action/target/@project='{}'+and+action/@type='delete')+and+(review/@state='new'+and+review/@by_group='{}')".format(project, self.api.delreq_review)
        url = self.api.makeurl(['search', 'request'], query)

        f = http_GET(url)
        root = ET.parse(f).getroot()

        rqs = []
        for rq in root.findall('request'):
            pkgs = []
            actions = rq.findall('action')
            for action in actions:
                targets = action.findall('target')
                for t in targets:
                    pkgs.append(str(t.get('package')))

            rqs.append({'id': int(rq.get('id')), 'packages': pkgs})
        return rqs

    def reset_rebuild_data(self, project):
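        """Reset the rebuild state of the accepted staging project to
        'unknown' and clear its supportpkg list in the
        support_pkg_rebuild dashboard file.
        """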
        url = self.api.makeurl(['source', self.api.cstaging, 'dashboard', 'support_pkg_rebuild?expand=1'])
        try:
            data = http_GET(url)
        except urllib2.HTTPError:
            return

        tree = ET.parse(data)
        root = tree.getroot()
        for stg in root.findall('staging'):
            if stg.get('name') == project:
                stg.find('rebuild').text = 'unknown'
                stg.find('supportpkg').text = ''

        # Reset the accepted staging project's rebuild state to unknown and
        # clean up its supportpkg list
        url = self.api.makeurl(['source', self.api.cstaging, 'dashboard', 'support_pkg_rebuild'])
        content = ET.tostring(root)
        http_PUT(url + '?comment=accept+command+update', data=content)

    def virtually_accept_delete(self, request_id, package):
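        """Start the virtual accept of a delete request: keep the request
        open via a delreq-review group review, then disable the build and
        wipe the binaries so the package can be removed safely later.
        """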
        self.api.add_review(request_id, by_group=self.api.delreq_review, msg='Request accepted. Cleanup in progress - DO NOT REVOKE!')

        filelist = self.api.get_filelist_for_package(pkgname=package, project=self.api.project, expand='1', extension='spec')
        pkgs = self.api.extract_specfile_short(filelist)

        # Disable the build and wipe the binaries of the package and its sub-packages
        for pkg in pkgs:
            meta = show_package_meta(self.api.apiurl, self.api.project, pkg)
            meta = ''.join(meta)
            # Update package meta to disable build
            self.api.create_package_container(self.api.project, pkg, meta=meta, disable_build=True)
            wipebinaries(self.api.apiurl, self.api.project, package=pkg, repo=self.api.main_repo)

            # Remove package from Rings
            if self.api.ring_packages.get(pkg):
                delete_package(self.api.apiurl, self.api.ring_packages.get(pkg), pkg, force=True, msg="Cleanup package in Rings")

    def perform(self, project, force=False):
        """Accept the staging project for review and submit its requests
        to the target project (Factory, openSUSE 13.2, ...).

        Then disable the build in the staging project.

        :param project: staging project we are working with
        """
        status = self.api.check_project_status(project)

        if not status:
            print('The project "{}" is not yet acceptable.'.format(project))
            if not force:
                return False

        meta = self.api.get_prj_pseudometa(project)
        packages = []
        for req in meta['requests']:
            self.api.rm_from_prj(project, request_id=req['id'], msg='ready to accept')
            packages.append(req['package'])
            msg = 'Accepting staging review for {}'.format(req['package'])

            oldspecs = self.api.get_filelist_for_package(pkgname=req['package'],
                                                         project=self.api.project,
                                                         extension='spec')
            if 'type' in req and req['type'] == 'delete' and self.api.delreq_review:
                msg += ' and started handling of virtual accept process'
                print(msg)
                # Virtually accept the delete request
                self.virtually_accept_delete(req['id'], req['package'])
            else:
                print(msg)
                change_request_state(self.api.apiurl,
                                     str(req['id']),
                                     'accepted',
                                     message='Accept to %s' % self.api.project)
                self.create_new_links(self.api.project, req['package'], oldspecs)

        self.api.accept_status_comment(project, packages)
        self.api.staging_deactivate(project)

        return True

    def cleanup(self, project):
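        """Delete all packages from the accepted staging project (except
        the configured no-cleanup list) and wipe the Test-DVD binaries.
        """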
        if not self.api.item_exists(project):
            return False

        pkglist = self.api.list_packages(project)
        clean_list = set(pkglist) - set(self.api.cstaging_nocleanup)

        for package in clean_list:
            print('[cleanup] deleted %s/%s' % (project, package))
            delete_package(self.api.apiurl, project, package, force=True, msg="autocleanup")

        # Wipe the Test-DVD binaries and break the kiwi build so they are
        # not simply rebuilt
        if project.startswith('openSUSE:'):
            for package in pkglist:
                if package.startswith('Test-DVD-'):
                    # intentionally break the kiwi file
                    arch = package.split('-')[-1]
                    fakepkgname = 'I-am-breaks-kiwi-build'
                    oldkiwifile = self.api.load_file_content(project, package, 'PRODUCT-' + arch + '.kiwi')
                    if oldkiwifile is not None:
                        newkiwifile = re.sub(r'<repopackage name="openSUSE-release"/>', '<repopackage name="%s"/>' % fakepkgname, oldkiwifile)
                        self.api.save_file_content(project, package, 'PRODUCT-' + arch + '.kiwi', newkiwifile)

                    # do wipe the binaries now
                    query = {'cmd': 'wipe'}
                    query['package'] = package
                    query['repository'] = 'images'

                    url = self.api.makeurl(['build', project], query)
                    try:
                        http_POST(url)
                    except urllib2.HTTPError:
                        # failed to wipe the isos, but we can just continue
                        pass

        return True

    def accept_other_new(self):
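        """Accept requests in state 'new' that target the project (and the
        non-free project, if configured) directly, first finalizing any
        virtually accepted delete requests whose binaries are gone.
        """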
        changed = False

        if self.api.delreq_review:
            rqlist = self.find_virtually_accepted_requests(self.api.project)
            for req in rqlist:
                if self.virtual_accept_request_has_no_binary(self.api.project, req['packages'][0]):
                    # Accepting the delreq-review review
                    self.api.do_change_review_state(req['id'], 'accepted',
                                                    by_group=self.api.delreq_review,
                                                    message='Virtually accepted delete {}'.format(req['packages'][0]))

        rqlist = self.find_new_requests(self.api.project)
        if self.api.cnonfree:
            rqlist += self.find_new_requests(self.api.cnonfree)

        for req in rqlist:
            oldspecs = self.api.get_filelist_for_package(pkgname=req['packages'][0], project=self.api.project, extension='spec')
            print('Accepting request %d: %s' % (req['id'], ','.join(req['packages'])))
            if req['type'] == 'delete':
                # Remove the devel project/package tag before accepting the request
                self.remove_obsoleted_develtag(self.api.project, req['packages'][0])
            change_request_state(self.api.apiurl, str(req['id']), 'accepted', message='Accept to %s' % self.api.project)
            # Check that all .spec files of the package we just accepted have a package container to build
            self.create_new_links(self.api.project, req['packages'][0], oldspecs)
            changed = True

        return changed

    def remove_obsoleted_develtag(self, project, package):
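        """Drop the devel tag from packages whose devel project/package is
        the package that is about to be deleted.
        """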
        xpath = {
            'package': "@project='%s' and devel/@project=@project and devel/@package='%s'" % (project, package),
        }
        collection = search(self.api.apiurl, **xpath)['package']
        for pkg in collection.findall('package'):
            set_devel_project(self.api.apiurl, project, pkg.attrib['name'], devprj=None)

    def create_new_links(self, project, pkgname, oldspeclist):
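        """Reconcile the package containers with the .spec files of
        pkgname: delete containers whose .spec file disappeared and create
        linked containers for any additional .spec files.
        """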
        filelist = self.api.get_filelist_for_package(pkgname=pkgname, project=project, extension='spec')
        removedspecs = set(oldspeclist) - set(filelist)
        for spec in removedspecs:
            # Delete all the packages that no longer have a .spec file
            url = self.api.makeurl(['source', project, spec[:-5]])
            print('Deleting package %s from project %s' % (spec[:-5], project))
            try:
                http_DELETE(url)
            except urllib2.HTTPError as err:
                if err.code == 404:
                    # the package link was not yet created, which was likely a mistake from earlier
                    pass
                else:
                    # If the package was there but could not be deleted, raise the error
                    raise

        if len(filelist) > 1:
            # There is more than one .spec file in the package; link package containers as needed
            origmeta = self.api.load_file_content(project, pkgname, '_meta')
            for specfile in filelist:
                package = specfile[:-5]  # stripping '.spec' off the filename gives the package name
                if package == pkgname:
                    # This is the original package and does not need to be linked to itself
                    continue
                # Check if the target package already exists; if it does not, we get an HTTP error 404 to catch
                if not self.api.item_exists(project, package):
                    print('Creating new package %s linked to %s' % (package, pkgname))
                    # The new package does not exist yet; link it with new metadata
                    newmeta = re.sub(r'(<package.*name=.){}'.format(pkgname),
                                     r'\1{}'.format(package),
                                     origmeta)
                    newmeta = re.sub(r'<devel.*>',
                                     r'<devel package="{}"/>'.format(pkgname),
                                     newmeta)
                    newmeta = re.sub(r'<bcntsynctag>.*</bcntsynctag>',
                                     r'',
                                     newmeta)
                    newmeta = re.sub(r'</package>',
                                     r'<bcntsynctag>{}</bcntsynctag></package>'.format(pkgname),
                                     newmeta)
                    self.api.save_file_content(project, package, '_meta', newmeta)
                    link = "<link package=\"{}\" cicount=\"copy\" />".format(pkgname)
                    self.api.save_file_content(project, package, '_link', link)
        return True

    def update_factory_version(self):
        """Update the project (Factory, 13.2, ...) version if necessary."""

        # XXX TODO - This method has `factory` in the name, which can be
        # misleading.

        project = self.api.project
        curr_version = date.today().strftime('%Y%m%d')
        url = self.api.makeurl(['source', project], {'view': 'productlist'})

        products = ET.parse(http_GET(url)).getroot()
        for product in products.findall('product'):
            product_name = product.get('name') + '.product'
            product_pkg = product.get('originpackage')
            url = self.api.makeurl(['source', project, product_pkg, product_name])
            product_spec = http_GET(url).read()
            new_product = re.sub(r'<version>\d{8}</version>', '<version>%s</version>' % curr_version, product_spec)

            if product_spec != new_product:
                http_PUT(url + '?comment=Update+version', data=new_product)

        service = {'cmd': 'runservice'}

        ports_prjs = ['PowerPC', 'ARM', 'zSystems']

        for ports in ports_prjs:
            project = self.api.project + ':' + ports
            if self.api.item_exists(project):
                baseurl = ['source', project, '_product']
                url = self.api.makeurl(baseurl, query=service)
                self.api.retried_POST(url)

    def sync_buildfailures(self):
        """
        Trigger a rebuild of packages that failed to build in either
        openSUSE:Factory or openSUSE:Factory:Rebuild, but not in the
        other. Helps with the fact that openSUSE:Factory uses
        rebuild=local, thus sometimes 'hiding' build failures.
        """

        for arch in ["x86_64", "i586"]:
            fact_result = self.api.get_prj_results(self.api.project, arch)
            fact_result = self.api.check_pkgs(fact_result)
            rebuild_result = self.api.get_prj_results(self.api.crebuild, arch)
            rebuild_result = self.api.check_pkgs(rebuild_result)
            result = set(rebuild_result) ^ set(fact_result)

            print(sorted(result))

            for package in result:
                self.api.rebuild_pkg(package, self.api.project, arch, None)
                self.api.rebuild_pkg(package, self.api.crebuild, arch, None)