import re
import time

from urllib.error import HTTPError

import warnings
from xml.etree import cElementTree as ET

from osc.core import change_request_state, show_package_meta, wipebinaries
from osc.core import http_GET, http_PUT, http_DELETE, http_POST
from osc.core import delete_package, search, set_devel_project
from osc.core import Request
from osc.util.helper import decode_it
from osclib.core import attribute_value_save
from osclib.core import attribute_value_load
from osclib.core import source_file_load
from osclib.core import source_file_save
from osclib.request_finder import RequestFinder
from datetime import date


class AcceptCommand(object):
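    """Accept staging projects and the requests staged in them."""
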
    def __init__(self, api):
        self.api = api

    def find_new_requests(self, project):
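        """Return the requests in state 'new' that target project.

        Each entry is a dict with the request id, the target packages and
        the action type.
        """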
        query = "match=state/@name='new'+and+action/target/@project='{}'".format(project)
        url = self.api.makeurl(['search', 'request'], query)

        f = http_GET(url)
        root = ET.parse(f).getroot()

        rqs = []
        for rq in root.findall('request'):
            pkgs = []
            act_type = None
            actions = rq.findall('action')
            for action in actions:
                act_type = action.get('type')
                targets = action.findall('target')
                for t in targets:
                    pkgs.append(str(t.get('package')))

            rqs.append({'id': int(rq.get('id')), 'packages': pkgs, 'type': act_type})
        return rqs

    def reset_rebuild_data(self, project):
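        """Reset the rebuild state of project to 'unknown' and clear its
        supportpkg list in the support_pkg_rebuild pseudometa file.
        """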
        data = self.api.pseudometa_file_load('support_pkg_rebuild')
        if data is None:
            return

        root = ET.fromstring(data)
        for stg in root.findall('staging'):
            if stg.get('name') == project:
                stg.find('rebuild').text = 'unknown'
                stg.find('supportpkg').text = ''

        # reset accepted staging project rebuild state to unknown and clean up
        # supportpkg list
        content = ET.tostring(root)
        if content != data:
            self.api.pseudometa_file_save('support_pkg_rebuild', content, 'accept command update')

    def perform(self, project, force=False):
        """Accept the staging project for review and submit to Factory /
        Leap ...

        Afterwards the build flag of the staging project is set to disabled.

        :param project: staging project we are working with
        """

        status = self.api.check_project_status(project)

        if not status:
            print('The project "{}" is not yet acceptable.'.format(project))
            if not force:
                return False

        status = self.api.project_status(project)
        packages = []

        rf = RequestFinder(self.api)
        oldspecs = {}
        for req in status.findall('staged_requests/request'):
            packages.append(req.get('package'))

            print('Checking file list of {}'.format(req.get('package')))
            os = self.api.get_filelist_for_package(pkgname=req.get('package'),
                                                   project=self.api.project,
                                                   extension='spec')
            oldspecs[req.get('package')] = os
            # self.create_new_links(self.api.project, req['package'], oldspecs)

        print(oldspecs)
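
        # Ask OBS to accept the staging project, then wait until the backend
        # reports it empty before commenting and deactivating it.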
        u = self.api.makeurl(['staging', self.api.project, 'staging_projects', project, 'accept'])
        f = http_POST(u)

        while self.api.project_status(project, reload=True, requests=False).get('state') != 'empty':
            time.sleep(1)

        self.api.accept_status_comment(project, packages)
        self.api.staging_deactivate(project)

        return True

    def cleanup(self, project):
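        """Delete all packages of project except the configured no-cleanup
        packages and wipe the Test-DVD binaries of openSUSE projects.
        """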
        if not self.api.item_exists(project):
            return False

        pkglist = self.api.list_packages(project)
        clean_list = set(pkglist) - set(self.api.cnocleanup_packages)

        for package in clean_list:
            print("[cleanup] deleted %s/%s" % (project, package))
            delete_package(self.api.apiurl, project, package, force=True, msg="autocleanup")

        # wipe Test-DVD binaries and break the kiwi build
        if project.startswith('openSUSE:'):
            for package in pkglist:
                if package.startswith('Test-DVD-'):
                    # intentionally break the kiwi file
                    arch = package.split('-')[-1]
                    fakepkgname = 'I-am-breaks-kiwi-build'
                    oldkiwifile = source_file_load(self.api.apiurl, project, package, 'PRODUCT-' + arch + '.kiwi')
                    if oldkiwifile is not None:
                        newkiwifile = re.sub(r'<repopackage name="openSUSE-release"/>', '<repopackage name="%s"/>' % fakepkgname, oldkiwifile)
                        source_file_save(self.api.apiurl, project, package, 'PRODUCT-' + arch + '.kiwi', newkiwifile)

                    # now wipe the binaries
                    query = {'cmd': 'wipe'}
                    query['package'] = package
                    query['repository'] = 'images'

                    url = self.api.makeurl(['build', project], query)
                    try:
                        http_POST(url)
                    except HTTPError:
                        # failed to wipe isos but we can just continue
                        pass

        return True

    def accept_other_new(self):
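        """Accept the remaining requests in state 'new' that target the
        project and keep the package links in sync afterwards.
        """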
        changed = False

        rqlist = self.find_new_requests(self.api.project)
        for req in rqlist:
            oldspecs = self.api.get_filelist_for_package(pkgname=req['packages'][0], project=self.api.project, extension='spec')
            print('Accepting request %d: %s' % (req['id'], ','.join(req['packages'])))
            if req['type'] == 'delete':
                # Remove devel project/package tag before accepting the request
                self.remove_obsoleted_develtag(self.api.project, req['packages'][0])
            change_request_state(self.api.apiurl, str(req['id']), 'accepted', message='Accept to %s' % self.api.project)
            # Check whether all .spec files of the package we just accepted have a package container to build
            self.create_new_links(self.api.project, req['packages'][0], oldspecs)
            changed = True

        return changed

    def remove_obsoleted_develtag(self, project, package):
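        """Unset the devel project of packages in project whose devel
        package is the given (to be deleted) package.
        """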
        xpath = {
            'package': "@project='%s' and devel/@project=@project and devel/@package='%s'" % (project, package),
        }
        collection = search(self.api.apiurl, **xpath)['package']
        for pkg in collection.findall('package'):
            set_devel_project(self.api.apiurl, project, pkg.attrib['name'], devprj=None)

    def create_new_links(self, project, pkgname, oldspeclist):
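        """Sync the link packages of pkgname with its .spec files.

        Packages whose .spec file was removed are deleted; for every
        additional .spec file a linked package container is created.
        """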
        filelist = self.api.get_filelist_for_package(pkgname=pkgname, project=project, extension='spec')
        removedspecs = set(oldspeclist) - set(filelist)
        for spec in removedspecs:
            # Delete all the packages that no longer have a .spec file
            url = self.api.makeurl(['source', project, spec[:-5]])
            print("Deleting package %s from project %s" % (spec[:-5], project))
            try:
                http_DELETE(url)
            except HTTPError as err:
                if err.code == 404:
                    # the package link was not yet created, which was likely a mistake from earlier
                    pass
                else:
                    # If the package was there but could not be deleted, raise the error
                    raise

            # Remove package from Rings in case a 2nd specfile was removed
            if self.api.ring_packages.get(spec[:-5]):
                delete_package(self.api.apiurl, self.api.ring_packages.get(spec[:-5]), spec[:-5], force=True, msg="Cleanup package in Rings")

        if len(filelist) > 1:
            # There is more than one .spec file in the package; link package containers as needed
            origmeta = source_file_load(self.api.apiurl, project, pkgname, '_meta')
            for specfile in filelist:
                package = specfile[:-5]  # stripping .spec off the filename gives the package name
                if package == pkgname:
                    # This is the original package and does not need to be linked to itself
                    continue
                # Check if the target package already exists; if it does not, we get an HTTP 404 error to catch
                if not self.api.item_exists(project, package):
                    print("Creating new package %s linked to %s" % (package, pkgname))
                    # The new package does not exist yet, so link it with new metadata
                    newmeta = re.sub(r'(<package.*name=.){}'.format(pkgname),
                                     r'\1{}'.format(package),
                                     origmeta)
                    newmeta = re.sub(r'<devel.*>',
                                     r'<devel package="{}"/>'.format(pkgname),
                                     newmeta)
                    newmeta = re.sub(r'<bcntsynctag>.*</bcntsynctag>',
                                     r'',
                                     newmeta)
                    newmeta = re.sub(r'</package>',
                                     r'<bcntsynctag>{}</bcntsynctag></package>'.format(pkgname),
                                     newmeta)
                    source_file_save(self.api.apiurl, project, package, '_meta', newmeta)
                    link = "<link package=\"{}\" cicount=\"copy\" />".format(pkgname)
                    source_file_save(self.api.apiurl, project, package, '_link', link)
        return True

    def update_version_attribute(self, project, version):
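        """Set the ProductVersion attribute of project to version if it differs."""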
        version_attr = attribute_value_load(self.api.apiurl, project, 'ProductVersion')
        if version_attr != version:
            attribute_value_save(self.api.apiurl, project, 'ProductVersion', version)

    def update_factory_version(self):
        """Update the project (Factory, 13.2, ...) version if necessary."""

        # XXX TODO - This method has `factory` in the name, which can be
        # misleading.

        project = self.api.project
        curr_version = date.today().strftime('%Y%m%d')
        update_version_attr = False
        url = self.api.makeurl(['source', project], {'view': 'productlist'})

        products = ET.parse(http_GET(url)).getroot()
        for product in products.findall('product'):
            product_name = product.get('name') + '.product'
            product_pkg = product.get('originpackage')
            product_spec = source_file_load(self.api.apiurl, project, product_pkg, product_name)
            new_product = re.sub(r'<version>\d{8}</version>', '<version>%s</version>' % curr_version, product_spec)

            if product_spec != new_product:
                update_version_attr = True
                url = self.api.makeurl(['source', project, product_pkg, product_name])
                http_PUT(url + '?comment=Update+version', data=new_product)

        if update_version_attr:
            self.update_version_attribute(project, curr_version)
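
        # Update the version attribute of the ports projects as well.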
        ports_prjs = ['PowerPC', 'ARM', 'zSystems']

        for ports in ports_prjs:
            project = self.api.project + ':' + ports
            if self.api.item_exists(project) and update_version_attr:
                self.update_version_attribute(project, curr_version)

    def sync_buildfailures(self):
        """
        Trigger a rebuild of packages that failed to build in either
        openSUSE:Factory or openSUSE:Factory:Rebuild, but not in the
        other. This helps work around the fact that openSUSE:Factory uses
        rebuild=local, which sometimes 'hides' build failures.
        """

        for arch in ["x86_64", "i586"]:
            fact_result = self.api.get_prj_results(self.api.project, arch)
            fact_result = self.api.check_pkgs(fact_result)
            rebuild_result = self.api.get_prj_results(self.api.crebuild, arch)
            rebuild_result = self.api.check_pkgs(rebuild_result)
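            # Packages that fail in exactly one of the two projects
            # (symmetric difference) get rebuilt in both.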
            result = set(rebuild_result) ^ set(fact_result)

            print(sorted(result))

            for package in result:
                self.api.rebuild_pkg(package, self.api.project, arch, None)
                self.api.rebuild_pkg(package, self.api.crebuild, arch, None)