2014-07-15 17:08:48 +02:00
|
|
|
import re
|
2019-10-09 11:10:08 +02:00
|
|
|
import time
|
2018-04-26 13:28:25 +02:00
|
|
|
|
2019-10-09 11:10:08 +02:00
|
|
|
from urllib.error import HTTPError
|
2018-04-26 13:28:25 +02:00
|
|
|
|
2015-02-19 10:57:55 +01:00
|
|
|
import warnings
|
2014-07-15 17:08:48 +02:00
|
|
|
from xml.etree import cElementTree as ET
|
|
|
|
|
2017-09-26 18:53:05 +08:00
|
|
|
from osc.core import change_request_state, show_package_meta, wipebinaries
|
2016-11-16 13:45:27 +01:00
|
|
|
from osc.core import http_GET, http_PUT, http_DELETE, http_POST
|
2019-11-24 20:02:58 +01:00
|
|
|
from osc.core import delete_package, search, meta_get_packagelist
|
2019-10-09 11:10:08 +02:00
|
|
|
from osc.core import Request
|
2019-05-28 16:48:09 +08:00
|
|
|
from osc.util.helper import decode_it
|
2019-01-15 19:46:57 +08:00
|
|
|
from osclib.core import attribute_value_save
|
|
|
|
from osclib.core import attribute_value_load
|
2018-08-17 22:15:58 -05:00
|
|
|
from osclib.core import source_file_load
|
|
|
|
from osclib.core import source_file_save
|
2019-10-09 11:10:08 +02:00
|
|
|
from osclib.request_finder import RequestFinder
|
2014-07-15 10:43:11 +02:00
|
|
|
from datetime import date
|
2014-02-28 11:45:06 +01:00
|
|
|
|
2014-02-19 11:48:16 +01:00
|
|
|
|
2014-03-06 18:22:19 +01:00
|
|
|
class AcceptCommand(object):
|
2014-02-19 11:48:16 +01:00
|
|
|
def __init__(self, api):
|
|
|
|
self.api = api
|
|
|
|
|
2014-06-01 19:51:18 +02:00
|
|
|
def find_new_requests(self, project):
|
2019-11-24 11:00:47 +01:00
|
|
|
match = f"state/@name='new' and action/target/@project='{project}'"
|
|
|
|
url = self.api.makeurl(['search', 'request'], { 'match': match })
|
2017-09-26 18:53:05 +08:00
|
|
|
|
|
|
|
f = http_GET(url)
|
|
|
|
root = ET.parse(f).getroot()
|
|
|
|
|
|
|
|
rqs = []
|
|
|
|
for rq in root.findall('request'):
|
2019-11-24 11:00:47 +01:00
|
|
|
for action in rq.findall('action'):
|
|
|
|
for t in action.findall('target'):
|
|
|
|
rqs.append({'id': int(rq.get('id')),
|
|
|
|
'package': str(t.get('package')),
|
|
|
|
'type': action.get('type')})
|
|
|
|
break
|
2017-09-26 18:53:05 +08:00
|
|
|
return rqs
|
|
|
|
|
2017-03-20 19:31:17 +08:00
|
|
|
def reset_rebuild_data(self, project):
|
2018-08-17 23:19:24 -05:00
|
|
|
data = self.api.pseudometa_file_load('support_pkg_rebuild')
|
2018-04-20 00:53:14 -05:00
|
|
|
if data is None:
|
2017-03-20 19:31:17 +08:00
|
|
|
return
|
2018-04-20 00:53:14 -05:00
|
|
|
|
|
|
|
root = ET.fromstring(data)
|
2017-03-20 19:31:17 +08:00
|
|
|
for stg in root.findall('staging'):
|
2017-08-18 18:52:16 +08:00
|
|
|
if stg.get('name') == project:
|
2017-03-20 19:31:17 +08:00
|
|
|
stg.find('rebuild').text = 'unknown'
|
2017-08-18 18:52:16 +08:00
|
|
|
stg.find('supportpkg').text = ''
|
2017-03-20 19:31:17 +08:00
|
|
|
|
2019-11-22 13:21:06 +01:00
|
|
|
# reset accepted staging project rebuild state to unknown and clean up
|
2017-08-18 18:52:16 +08:00
|
|
|
# supportpkg list
|
2017-03-20 19:31:17 +08:00
|
|
|
content = ET.tostring(root)
|
2018-04-20 00:53:14 -05:00
|
|
|
if content != data:
|
2018-08-17 23:19:24 -05:00
|
|
|
self.api.pseudometa_file_save('support_pkg_rebuild', content, 'accept command update')
|
2017-03-20 19:31:17 +08:00
|
|
|
|
2019-11-24 13:18:27 +01:00
|
|
|
def delete_linked(self):
|
|
|
|
for package in self.requests['delete']:
|
|
|
|
for link in self.api.linked_packages(package):
|
|
|
|
if link['project'] in self.api.rings or link['project'] == self.api.project:
|
|
|
|
print(f"delete {link['project']}/{link['package']}")
|
|
|
|
delete_package(self.api.apiurl, link['project'], link['package'],
|
|
|
|
msg="remove link while accepting delete of {}".format(package))
|
|
|
|
|
2019-11-22 13:21:06 +01:00
|
|
|
def accept_all(self, projects, force=False, cleanup=True):
|
2019-12-06 14:20:34 +01:00
|
|
|
accept_all_green = len(projects) == 0
|
|
|
|
if accept_all_green:
|
|
|
|
print('Accepting all acceptable projects')
|
|
|
|
if force:
|
|
|
|
print('ERROR: Not compatible with force option')
|
|
|
|
return False
|
|
|
|
|
2019-11-24 11:00:47 +01:00
|
|
|
self.requests = { 'delete': [], 'submit': [] }
|
2019-11-24 16:21:32 +01:00
|
|
|
staging_packages = {}
|
2019-11-24 11:00:47 +01:00
|
|
|
|
2019-12-06 14:20:34 +01:00
|
|
|
if accept_all_green:
|
|
|
|
projects = self.api.get_staging_projects()
|
|
|
|
|
2019-11-24 11:00:47 +01:00
|
|
|
for prj in projects:
|
|
|
|
project = self.api.prj_from_letter(prj)
|
|
|
|
|
|
|
|
status = self.api.project_status(project)
|
2019-12-06 14:20:34 +01:00
|
|
|
if status.get('state') != 'acceptable':
|
|
|
|
if accept_all_green:
|
|
|
|
continue
|
|
|
|
if not force:
|
|
|
|
print('The project "{}" is not yet acceptable.'.format(project))
|
|
|
|
return False
|
2019-11-24 11:00:47 +01:00
|
|
|
|
2019-11-24 16:21:32 +01:00
|
|
|
staging_packages[project] = []
|
2019-11-24 11:00:47 +01:00
|
|
|
for request in status.findall('staged_requests/request'):
|
|
|
|
self.requests[request.get('type')].append(request.get('package'))
|
2019-11-24 16:21:32 +01:00
|
|
|
staging_packages[project].append(request.get('package'))
|
2019-11-24 11:00:47 +01:00
|
|
|
|
|
|
|
other_new = self.find_new_requests(self.api.project)
|
|
|
|
for req in other_new:
|
|
|
|
self.requests[req['type']].append(req['package'])
|
|
|
|
|
2019-11-24 16:21:32 +01:00
|
|
|
print('delete links to packages pending deletion...')
|
2019-11-24 13:18:27 +01:00
|
|
|
self.delete_linked()
|
2019-11-24 16:21:32 +01:00
|
|
|
|
2019-12-09 10:08:47 +01:00
|
|
|
# we have checked ourselves and accepting one staging project creates a race
|
|
|
|
# for the other staging projects to appear building again
|
|
|
|
opts = { 'force': '1' }
|
2019-11-24 16:21:32 +01:00
|
|
|
|
|
|
|
print('triggering staging accepts...')
|
2019-12-06 14:20:34 +01:00
|
|
|
for project in staging_packages.keys():
|
2019-11-24 16:21:32 +01:00
|
|
|
u = self.api.makeurl(['staging', self.api.project, 'staging_projects', project, 'accept'], opts)
|
2019-12-06 14:20:34 +01:00
|
|
|
http_POST(u)
|
2019-11-24 11:00:47 +01:00
|
|
|
|
|
|
|
for req in other_new:
|
2019-11-24 16:21:32 +01:00
|
|
|
print(f"Accepting request {req['id']}: {req['package']}")
|
2019-11-24 11:00:47 +01:00
|
|
|
change_request_state(self.api.apiurl, str(req['id']), 'accepted', message='Accept to %s' % self.api.project)
|
2014-07-15 10:43:11 +02:00
|
|
|
|
2019-12-10 09:43:16 +01:00
|
|
|
for project in sorted(staging_packages.keys()):
|
2019-11-24 16:21:32 +01:00
|
|
|
print(f'waiting for staging project {project} to be accepted')
|
2019-10-09 11:10:08 +02:00
|
|
|
|
2019-11-24 16:21:32 +01:00
|
|
|
while True:
|
|
|
|
status = self.api.project_status(project, reload=True)
|
|
|
|
if status.get('state') == 'empty':
|
|
|
|
break
|
|
|
|
print('{} requests still staged - waiting'.format(status.find('staged_requests').get('count')))
|
|
|
|
time.sleep(1)
|
2019-11-22 13:21:55 +01:00
|
|
|
|
2019-11-24 16:21:32 +01:00
|
|
|
self.api.accept_status_comment(project, staging_packages[project])
|
2019-12-06 07:35:38 +01:00
|
|
|
if self.api.is_adi_project(project):
|
|
|
|
self.api.delete_empty_adi_project(project)
|
2019-12-10 09:43:16 +01:00
|
|
|
continue
|
2019-12-06 07:35:38 +01:00
|
|
|
|
2019-11-24 16:21:32 +01:00
|
|
|
self.api.staging_deactivate(project)
|
2019-10-09 11:10:08 +02:00
|
|
|
|
2019-12-10 09:43:16 +01:00
|
|
|
self.reset_rebuild_data(project)
|
2019-10-09 11:10:08 +02:00
|
|
|
|
2019-11-24 16:21:32 +01:00
|
|
|
if cleanup:
|
|
|
|
self.cleanup(project)
|
2014-05-27 18:32:51 +02:00
|
|
|
|
2019-11-24 17:26:55 +01:00
|
|
|
for package in self.requests['submit']:
|
|
|
|
self.fix_linking_packages(package)
|
|
|
|
|
2019-11-24 16:21:32 +01:00
|
|
|
if self.api.project.startswith('openSUSE:'):
|
|
|
|
self.update_factory_version()
|
2019-12-03 15:25:45 +01:00
|
|
|
if self.api.crebuild and self.api.item_exists(self.api.crebuild):
|
2019-11-24 16:21:32 +01:00
|
|
|
self.sync_buildfailures()
|
2014-06-01 19:51:18 +02:00
|
|
|
|
2019-11-24 19:21:14 +01:00
|
|
|
return True
|
|
|
|
|
2016-05-31 15:30:29 +02:00
|
|
|
def cleanup(self, project):
|
2016-06-02 14:55:02 +02:00
|
|
|
if not self.api.item_exists(project):
|
2019-11-24 19:21:14 +01:00
|
|
|
return
|
2016-06-02 14:55:02 +02:00
|
|
|
|
2016-05-31 15:30:29 +02:00
|
|
|
pkglist = self.api.list_packages(project)
|
2018-03-07 15:44:16 -06:00
|
|
|
clean_list = set(pkglist) - set(self.api.cnocleanup_packages)
|
2016-05-31 15:30:29 +02:00
|
|
|
|
|
|
|
for package in clean_list:
|
2018-04-26 13:28:25 +02:00
|
|
|
print("[cleanup] deleted %s/%s" % (project, package))
|
2016-05-31 15:30:29 +02:00
|
|
|
delete_package(self.api.apiurl, project, package, force=True, msg="autocleanup")
|
2016-11-15 17:46:03 +08:00
|
|
|
|
2019-11-24 19:21:14 +01:00
|
|
|
return
|
2016-05-31 15:30:29 +02:00
|
|
|
|
2019-11-24 20:02:58 +01:00
|
|
|
def check_local_links(self):
|
|
|
|
for package in meta_get_packagelist(self.api.apiurl, self.api.project):
|
|
|
|
self.fix_linking_packages(package, True)
|
|
|
|
|
|
|
|
def fix_linking_packages(self, package, dry=False):
|
2019-11-24 17:26:55 +01:00
|
|
|
project = self.api.project
|
|
|
|
file_list = self.api.get_filelist_for_package(package, project)
|
2019-12-04 17:11:21 +01:00
|
|
|
# ignore linked packages
|
|
|
|
if '_link' in file_list:
|
2019-11-24 17:26:55 +01:00
|
|
|
return
|
|
|
|
needed_links = set()
|
2019-12-04 17:11:21 +01:00
|
|
|
# if there's a multibuild we assume all flavors are built
|
|
|
|
# using multibuild. So any potential previous links have to
|
|
|
|
# be removed ie set of needed_links left empty.
|
|
|
|
if '_multibuild' not in file_list:
|
|
|
|
for file in file_list:
|
|
|
|
if file.endswith('.spec') and file != f'{package}.spec':
|
|
|
|
needed_links.add(file[:-5])
|
2019-11-24 17:26:55 +01:00
|
|
|
local_links = set()
|
|
|
|
for link in self.api.linked_packages(package):
|
|
|
|
if link['project'] == project:
|
|
|
|
local_links.add(link['package'])
|
|
|
|
|
|
|
|
# Deleting all the packages that no longer have a .spec file
|
|
|
|
for link in local_links - needed_links:
|
|
|
|
print(f"Deleting package {project}/{link}")
|
2019-11-24 20:02:58 +01:00
|
|
|
if dry:
|
|
|
|
continue
|
2015-01-16 15:37:07 +00:00
|
|
|
try:
|
2019-11-24 17:26:55 +01:00
|
|
|
delete_package(self.api.apiurl, project, link, msg=f"No longer linking to {package}")
|
2018-04-26 13:28:25 +02:00
|
|
|
except HTTPError as err:
|
2015-01-16 15:37:07 +00:00
|
|
|
if err.code == 404:
|
|
|
|
# the package link was not yet created, which was likely a mistake from earlier
|
|
|
|
pass
|
|
|
|
else:
|
|
|
|
# If the package was there bug could not be delete, raise the error
|
|
|
|
raise
|
2017-10-31 19:46:53 +08:00
|
|
|
|
|
|
|
# Remove package from Rings in case 2nd specfile was removed
|
2019-11-24 17:26:55 +01:00
|
|
|
if self.api.ring_packages.get(link):
|
|
|
|
delete_package(self.api.apiurl, self.api.ring_packages.get(link), link, force=True, msg="Cleanup package in Rings")
|
2017-10-31 19:46:53 +08:00
|
|
|
|
2019-11-24 17:26:55 +01:00
|
|
|
for link in needed_links - local_links:
|
2015-01-16 15:37:07 +00:00
|
|
|
# There is more than one .spec file in the package; link package containers as needed
|
2019-11-24 17:26:55 +01:00
|
|
|
meta = ET.fromstring(source_file_load(self.api.apiurl, project, package, '_meta'))
|
|
|
|
print(f"Creating new link {link}->{package}")
|
2019-11-24 20:02:58 +01:00
|
|
|
if dry:
|
|
|
|
continue
|
2019-11-24 17:26:55 +01:00
|
|
|
|
|
|
|
meta.attrib['name'] = link
|
|
|
|
bcnt = meta.find('bcntsynctag')
|
|
|
|
if bcnt is None:
|
|
|
|
bcnt = ET.SubElement(meta, 'bcntsynctag')
|
|
|
|
bcnt.text = package
|
|
|
|
devel = meta.find('devel')
|
|
|
|
if devel is None:
|
|
|
|
devel = ET.SubElement(meta, 'devel')
|
|
|
|
devel.attrib['project'] = project
|
|
|
|
devel.attrib['package'] = package
|
|
|
|
|
|
|
|
source_file_save(self.api.apiurl, project, link, '_meta', ET.tostring(meta))
|
|
|
|
xml = f"<link package='{package}' cicount='copy' />"
|
|
|
|
source_file_save(self.api.apiurl, project, link, '_link', xml)
|
2015-01-16 15:37:07 +00:00
|
|
|
|
2019-01-15 19:46:57 +08:00
|
|
|
def update_version_attribute(self, project, version):
|
|
|
|
version_attr = attribute_value_load(self.api.apiurl, project, 'ProductVersion')
|
|
|
|
if version_attr != version:
|
|
|
|
attribute_value_save(self.api.apiurl, project, 'ProductVersion', version)
|
|
|
|
|
2014-07-15 10:43:11 +02:00
|
|
|
def update_factory_version(self):
|
2015-02-19 10:57:55 +01:00
|
|
|
"""Update project (Factory, 13.2, ...) version if is necessary."""
|
|
|
|
|
|
|
|
# XXX TODO - This method have `factory` in the name. Can be
|
|
|
|
# missleading.
|
|
|
|
|
|
|
|
project = self.api.project
|
2014-07-15 17:08:48 +02:00
|
|
|
curr_version = date.today().strftime('%Y%m%d')
|
2019-01-15 19:46:57 +08:00
|
|
|
update_version_attr = False
|
2017-07-06 12:50:16 +02:00
|
|
|
url = self.api.makeurl(['source', project], {'view': 'productlist'})
|
|
|
|
|
|
|
|
products = ET.parse(http_GET(url)).getroot()
|
|
|
|
for product in products.findall('product'):
|
|
|
|
product_name = product.get('name') + '.product'
|
|
|
|
product_pkg = product.get('originpackage')
|
2019-05-14 17:50:31 +08:00
|
|
|
product_spec = source_file_load(self.api.apiurl, project, product_pkg, product_name)
|
2017-07-06 12:50:16 +02:00
|
|
|
new_product = re.sub(r'<version>\d{8}</version>', '<version>%s</version>' % curr_version, product_spec)
|
|
|
|
|
|
|
|
if product_spec != new_product:
|
2019-01-15 19:46:57 +08:00
|
|
|
update_version_attr = True
|
2019-12-10 08:46:02 +01:00
|
|
|
url = self.api.makeurl(['source', project, product_pkg, product_name])
|
2017-07-06 12:50:16 +02:00
|
|
|
http_PUT(url + '?comment=Update+version', data=new_product)
|
2015-02-02 15:02:57 +01:00
|
|
|
|
2019-01-15 19:46:57 +08:00
|
|
|
if update_version_attr:
|
|
|
|
self.update_version_attribute(project, curr_version)
|
|
|
|
|
2017-02-01 22:06:31 +01:00
|
|
|
ports_prjs = ['PowerPC', 'ARM', 'zSystems' ]
|
2015-05-03 16:21:10 +02:00
|
|
|
|
|
|
|
for ports in ports_prjs:
|
2015-07-03 09:22:09 +02:00
|
|
|
project = self.api.project + ':' + ports
|
2019-01-24 17:49:31 +08:00
|
|
|
if self.api.item_exists(project) and update_version_attr:
|
|
|
|
self.update_version_attribute(project, curr_version)
|
2015-05-03 16:21:10 +02:00
|
|
|
|
2015-02-02 15:02:57 +01:00
|
|
|
def sync_buildfailures(self):
|
2015-02-19 10:57:55 +01:00
|
|
|
"""
|
|
|
|
Trigger rebuild of packages that failed build in either
|
|
|
|
openSUSE:Factory or openSUSE:Factory:Rebuild, but not the
|
|
|
|
other Helps over the fact that openSUSE:Factory uses
|
|
|
|
rebuild=local, thus sometimes 'hiding' build failures.
|
|
|
|
"""
|
2015-02-02 15:02:57 +01:00
|
|
|
|
2015-02-19 10:57:55 +01:00
|
|
|
for arch in ["x86_64", "i586"]:
|
|
|
|
fact_result = self.api.get_prj_results(self.api.project, arch)
|
2015-02-02 15:02:57 +01:00
|
|
|
fact_result = self.api.check_pkgs(fact_result)
|
2015-02-19 10:57:55 +01:00
|
|
|
rebuild_result = self.api.get_prj_results(self.api.crebuild, arch)
|
2015-02-02 15:02:57 +01:00
|
|
|
rebuild_result = self.api.check_pkgs(rebuild_result)
|
|
|
|
result = set(rebuild_result) ^ set(fact_result)
|
|
|
|
|
2018-04-26 13:28:25 +02:00
|
|
|
print(sorted(result))
|
2015-02-02 15:02:57 +01:00
|
|
|
|
|
|
|
for package in result:
|
2015-02-19 10:57:55 +01:00
|
|
|
self.api.rebuild_pkg(package, self.api.project, arch, None)
|
|
|
|
self.api.rebuild_pkg(package, self.api.crebuild, arch, None)
|