from collections import namedtuple
from datetime import datetime, timezone
from dateutil.parser import parse as date_parse
import re
import socket
import logging
from lxml import etree as ET
from urllib.error import HTTPError
from typing import Optional

from osc.core import create_submit_request
from osc.core import get_binarylist
from osc.core import get_commitlog
from osc.core import get_dependson
from osc.core import http_DELETE
from osc.core import http_GET
from osc.core import http_POST
from osc.core import http_PUT
from osc.core import makeurl
from osc.core import owner
from osc.core import search as osc_core_search
from osc.core import Request
from osc.core import Action
from osc.core import show_package_meta
from osc.core import show_project_meta
from osc.core import show_results_meta
from osc.core import xpath_join
from osc.util.helper import decode_it
from osc import conf
from osclib.conf import Config
from osclib.memoize import memoize
import traceback

BINARY_REGEX = r'(?:.*::)?(?P<filename>(?P<name>.*)-(?P<version>[^-]+)-(?P<release>[^-]+)\.(?P<arch>[^-\.]+))'
RPM_REGEX = BINARY_REGEX + r'\.rpm'
BinaryParsed = namedtuple('BinaryParsed', ('package', 'filename', 'name', 'arch'))
REQUEST_STATES_MINUS_ACCEPTED = ['new', 'review', 'declined', 'revoked', 'superseded']
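

# Illustrative sketch of what the regexes above capture; the filename below is
# a hypothetical example, not taken from a real repository:
#
#   result = re.match(RPM_REGEX, 'libfoo1-1.2.3-lp152.1.1.x86_64.rpm')
#   result.group('name')      # 'libfoo1'
#   result.group('version')   # '1.2.3'
#   result.group('release')   # 'lp152.1.1'
#   result.group('arch')      # 'x86_64'
#   result.group('filename')  # 'libfoo1-1.2.3-lp152.1.1.x86_64'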


@memoize(session=True)
def group_members(apiurl, group, maintainers=False):
    url = makeurl(apiurl, ['group', group])
    root = ET.parse(http_GET(url)).getroot()

    if maintainers:
        return root.xpath('maintainer/@userid')

    return root.xpath('person/person/@userid')


def groups_members(apiurl, groups):
    members = []

    for group in groups:
        members.extend(group_members(apiurl, group))

    return members


def get_request_list_with_history(
        apiurl, project='', package='', req_who='', req_state=('new', 'review', 'declined'),
        req_type=None, exclude_target_projects=[]):
    """Use an xpath search to get the full request history. Deprecated copy of old code from osc 0.x."""

    import warnings
    warnings.warn(
        "get_request_list_with_history() uses an xpath search, which is slow. consider porting to "
        "use osc.core.get_request_collection() instead.",
        DeprecationWarning
    )

    xpath = ''
    if 'all' not in req_state:
        for state in req_state:
            xpath = xpath_join(xpath, 'state/@name=\'%s\'' % state, inner=True)
    if req_who:
        xpath = xpath_join(xpath, '(state/@who=\'%(who)s\' or history/@who=\'%(who)s\')' % {'who': req_who}, op='and')

    # XXX: we cannot use the '|' in the xpath expression because it is not supported
    # in the backend
    todo = {}
    if project:
        todo['project'] = project
    if package:
        todo['package'] = package
    for kind, val in todo.items():
        xpath_base = 'action/target/@%(kind)s=\'%(val)s\''
        if conf.config['include_request_from_project']:
            xpath_base = xpath_join(xpath_base, 'action/source/@%(kind)s=\'%(val)s\'', op='or', inner=True)
        xpath = xpath_join(xpath, xpath_base % {'kind': kind, 'val': val}, op='and', nexpr_parentheses=True)

    if req_type:
        xpath = xpath_join(xpath, 'action/@type=\'%s\'' % req_type, op='and')
    for i in exclude_target_projects:
        xpath = xpath_join(xpath, '(not(action/target/@project=\'%(prj)s\'))' % {'prj': i}, op='and')

    if conf.config['verbose'] > 1:
        print('[ %s ]' % xpath)
    queries = {}
    queries['request'] = {'withfullhistory': '1'}
    res = osc_core_search(apiurl, queries=queries, request=xpath)
    collection = res['request']
    requests = []
    for root in collection.findall('request'):
        r = Request()
        r.read(root)
        requests.append(r)
    return requests


def convert_from_osc_et(xml):
    """osc uses xml.etree while we rely on lxml."""
    from xml.etree import ElementTree as oscET
    return ET.fromstring(oscET.tostring(xml))


@memoize(session=True)
def owner_fallback(apiurl, project, package):
    root = owner(apiurl, package, project=project)
    entry = root.find('owner')
    if entry is None or project.startswith(entry.get('project')):
        # Fallback to global (ex Factory) maintainer.
        root = owner(apiurl, package)
    return convert_from_osc_et(root)


@memoize(session=True)
def maintainers_get(apiurl, project, package=None):
    if package is None:
        meta = ET.fromstringlist(show_project_meta(apiurl, project))
        maintainers = meta.xpath('//person[@role="maintainer"]/@userid')

        groups = meta.xpath('//group[@role="maintainer"]/@groupid')
        maintainers.extend(groups_members(apiurl, groups))

        return maintainers

    root = owner_fallback(apiurl, project, package)
    maintainers = root.xpath('//person[@role="maintainer"]/@name')

    groups = root.xpath('//group[@role="maintainer"]/@name')
    maintainers.extend(groups_members(apiurl, groups))

    return maintainers


@memoize(session=True)
def package_role_expand(apiurl, project, package, role='maintainer', inherit=True):
    """
    All users with a certain role on a package, including those who have the role directly assigned
    and those who are part of a group with that role.
    """
    meta = ET.fromstringlist(show_package_meta(apiurl, project, package))
    users = meta_role_expand(apiurl, meta, role)

    if inherit:
        users.extend(project_role_expand(apiurl, project, role))

    return users
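

# Usage sketch (hypothetical project/package names): direct role holders and
# members of groups holding the role are merged; with inherit=True the
# project-level holders from project_role_expand() are appended, so the result
# is a superset of the direct list.
#
#   users = package_role_expand(apiurl, 'openSUSE:Factory', 'curl')
#   users_direct = package_role_expand(apiurl, 'openSUSE:Factory', 'curl', inherit=False)
#   assert set(users_direct) <= set(users)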


@memoize(session=True)
def project_role_expand(apiurl, project, role='maintainer'):
    """
    All users with a certain role on a project, including those who have the role directly assigned
    and those who are part of a group with that role.
    """
    meta = ET.fromstringlist(show_project_meta(apiurl, project))
    return meta_role_expand(apiurl, meta, role)


def meta_role_expand(apiurl, meta, role='maintainer'):
    users = meta.xpath('//person[@role="{}"]/@userid'.format(role))

    groups = meta.xpath('//group[@role="{}"]/@groupid'.format(role))
    users.extend(groups_members(apiurl, groups))

    return users


def package_list(apiurl, project, expand=True):
    query = {}
    if expand:
        query['expand'] = 1
    url = makeurl(apiurl, ['source', project], query)
    root = ET.parse(http_GET(url)).getroot()

    packages = []
    for package in root.findall('entry'):
        packages.append(package.get('name'))

    return sorted(packages)


@memoize(session=True)
def target_archs(apiurl, project, repository='standard'):
    meta = ET.fromstringlist(show_project_meta(apiurl, project))
    return meta.xpath('repository[@name="{}"]/arch/text()'.format(repository))


@memoize(session=True)
def depends_on(apiurl, project, repository, packages=None, reverse=None):
    dependencies = set()
    for arch in target_archs(apiurl, project, repository):
        root = ET.fromstring(get_dependson(apiurl, project, repository, arch, packages, reverse))
        dependencies.update(pkgdep.text for pkgdep in root.findall('.//pkgdep'))

    return dependencies
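

# Usage sketch (hypothetical values): with reverse=True the result is the set
# of packages that depend on the given ones, aggregated over all target
# architectures of the repository.
#
#   needs_rebuild = depends_on(apiurl, 'openSUSE:Factory', 'standard',
#                              packages=['zlib'], reverse=True)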


def request_when_staged(request, project, first=False):
    when = None
    for history in request.statehistory:
        if project in history.comment:
            when = history.when

    return date_parse(when)


def binary_list(apiurl, project, repository, arch, package=None):
    parsed = []
    for binary in get_binarylist(apiurl, project, repository, arch, package):
        result = re.match(RPM_REGEX, binary)
        if not result:
            continue

        name = result.group('name')
        if name.endswith('-debuginfo') or name.endswith('-debuginfo-32bit'):
            continue
        if name.endswith('-debugsource'):
            continue
        if result.group('arch') == 'src':
            continue

        parsed.append(BinaryParsed(package, result.group('filename'), name, result.group('arch')))

    return parsed


@memoize(session=True)
def package_binary_list(apiurl, project, repository, arch, package=None, strip_multibuild=True, exclude_src_debug=False):
    path = ['build', project, repository, arch]
    if package:
        path.append(package)
    url = makeurl(apiurl, path, {'view': 'binaryversions'})
    root = ET.parse(http_GET(url)).getroot()

    package_binaries = []
    binary_map = {}  # last duplicate wins
    for binary_list in root:
        package = binary_list.get('package')
        if strip_multibuild:
            package = package.split(':', 1)[0]

        for binary in binary_list:
            filename = binary.get('name')
            result = re.match(RPM_REGEX, filename)
            if not result:
                continue

            binary = BinaryParsed(package, result.group('filename'),
                                  result.group('name'), result.group('arch'))
            if exclude_src_debug and binary_src_debug(binary):
                continue

            package_binaries.append(binary)
            binary_map[result.group('filename')] = package

    return package_binaries, binary_map


def binary_src_debug(binary):
    return (
        binary.arch == 'src' or
        binary.arch == 'nosrc' or
        binary.name.endswith('-debuginfo') or
        binary.name.endswith('-debugsource')
    )


@memoize(session=True)
def devel_project_get(apiurl: str, target_project: str, target_package: str):
    try:
        meta = ET.fromstringlist(show_package_meta(apiurl, target_project, target_package))
        node = meta.find('devel')
        if node is not None:
            return node.get('project'), node.get('package')
    except HTTPError as e:
        if e.code != 404:
            raise e

    return None, None


@memoize(session=True)
def devel_project_fallback(apiurl, target_project, target_package):
    project, package = devel_project_get(apiurl, target_project, target_package)
    if project is None and target_project != 'openSUSE:Factory':
        if target_project.startswith('openSUSE:'):
            project, package = devel_project_get(apiurl, 'openSUSE:Factory', target_package)
        elif target_project.startswith('SUSE:'):
            # For SLE (assume IBS), fallback to openSUSE:Factory devel projects.
            project, package = devel_project_get(apiurl, 'openSUSE.org:openSUSE:Factory', target_package)
            if project:
                # Strip the openSUSE.org: prefix since it is not used for lookup.
                project = project.split(':', 1)[1]

    return project, package
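

# Fallback sketch (hypothetical target): a SUSE: target without its own devel
# project is looked up in openSUSE:Factory over the interconnect, and the
# openSUSE.org: prefix is stripped from the result before it is returned.
#
#   project, package = devel_project_fallback(apiurl, 'SUSE:SLE-15:GA', 'curl')
#   # project is the Factory devel project, without the openSUSE.org: prefix.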


@memoize(session=True)
def devel_projects(apiurl, project):
    devel_projects = set()

    root = search(apiurl, 'package', "@project='{}' and devel/@project!=''".format(project))
    for devel_project in root.xpath('package/devel/@project'):
        if devel_project != project:
            devel_projects.add(devel_project)

    return sorted(devel_projects)


def request_created(request):
    if isinstance(request, Request):
        created = request.statehistory[0].when
    else:
        created = request.find('history').get('when')
    return date_parse(created)


def request_age(request):
    return datetime.utcnow() - request_created(request)


def project_list_prefix(apiurl, prefix):
    """Get a list of projects with the same prefix."""
    query = {'match': 'starts-with(@name, "{}")'.format(prefix)}
    url = makeurl(apiurl, ['search', 'project', 'id'], query)
    root = ET.parse(http_GET(url)).getroot()
    return root.xpath('project/@name')


def project_locked(apiurl, project):
    meta = ET.fromstringlist(show_project_meta(apiurl, project))
    return meta.find('lock/enable') is not None


#
# Dependency helpers
#

def fileinfo_ext_all(apiurl, project, repo, arch, package):
    url = makeurl(apiurl, ['build', project, repo, arch, package])
    binaries = ET.parse(http_GET(url)).getroot()
    for binary in binaries.findall('binary'):
        filename = binary.get('filename')
        if not filename.endswith('.rpm'):
            continue

        yield fileinfo_ext(apiurl, project, repo, arch, package, filename)


def fileinfo_ext(apiurl, project, repo, arch, package, filename):
    url = makeurl(apiurl,
                  ['build', project, repo, arch, package, filename],
                  {'view': 'fileinfo_ext'})
    return ET.parse(http_GET(url)).getroot()


def builddepinfo(apiurl, project, repo, arch, order=False):
    query = {}
    if order:
        query['view'] = 'order'
    url = makeurl(apiurl, ['build', project, repo, arch, '_builddepinfo'], query)
    return ET.parse(http_GET(url)).getroot()


def entity_email(apiurl, key, entity_type='person', include_name=False):
    url = makeurl(apiurl, [entity_type, key])
    root = ET.parse(http_GET(url)).getroot()

    email = root.find('email')
    if email is None:
        return None
    email = email.text

    realname = root.find('realname')
    if include_name and realname is not None:
        email = '{} <{}>'.format(realname.text, email)

    return email


def source_file_load(apiurl, project, package, filename, revision=None):
    query = {'expand': 1}
    if revision:
        query['rev'] = revision
    url = makeurl(apiurl, ['source', project, package, filename], query)
    try:
        return decode_it(http_GET(url).read())
    except HTTPError:
        return None


def source_file_save(apiurl, project, package, filename, content, comment=None):
    comment = message_suffix('updated', comment)
    url = makeurl(apiurl, ['source', project, package, filename], {'comment': comment})
    http_PUT(url, data=content)


def source_file_ensure(apiurl, project, package, filename, content, comment=None):
    if content != source_file_load(apiurl, project, package, filename):
        source_file_save(apiurl, project, package, filename, content, comment)


def project_pseudometa_package(apiurl, project):
    package = Config.get(apiurl, project).get('pseudometa_package', '00Meta')
    if '/' in package:
        project, package = package.split('/', 2)

    return project, package


def project_pseudometa_file_load(apiurl, project, filename, revision=None):
    project, package = project_pseudometa_package(apiurl, project)
    source_file = source_file_load(apiurl, project, package, filename, revision)
    if source_file is not None:
        source_file = source_file.rstrip()
    return source_file


def project_pseudometa_file_save(apiurl, project, filename, content, comment=None):
    project, package = project_pseudometa_package(apiurl, project)
    source_file_save(apiurl, project, package, filename, content, comment)


def project_pseudometa_file_ensure(apiurl, project, filename, content, comment=None):
    if content != project_pseudometa_file_load(apiurl, project, filename):
        project_pseudometa_file_save(apiurl, project, filename, content, comment)


# Should be an API call that says give me "real" packages that does not include
# multibuild entries, nor linked packages, nor maintenance update packages, but
# does include inherited packages from project layering. Unfortunately, no such
# call provides either server-side filtering or enough information to filter
# client-side. As such extra calls must be made for each package to handle the
# various different cases that can exist between products. For a more detailed
# write-up see the opensuse-buildservice mailing list thread:
# https://lists.opensuse.org/opensuse-buildservice/2019-05/msg00020.html.
def package_list_kind_filtered(apiurl, project, kinds_allowed=['source']):
    query = {
        'view': 'info',
        'nofilename': '1',
    }
    url = makeurl(apiurl, ['source', project], query)
    root = ET.parse(http_GET(url)).getroot()

    for package in root.xpath('sourceinfo/@package'):
        kind = package_kind(apiurl, project, package)
        if kind not in kinds_allowed:
            continue

        yield package


def attribute_value_load(apiurl: str, project: str, name: str, namespace='OSRT', package: Optional[str] = None):
    path = list(filter(None, ['source', project, package, '_attribute', namespace + ':' + name]))
    url = makeurl(apiurl, path)

    try:
        root = ET.parse(http_GET(url)).getroot()
    except HTTPError as e:
        if e.code == 404:
            return None

        raise e

    xpath_base = './attribute[@namespace="{}" and @name="{}"]'.format(namespace, name)
    value = root.xpath('{}/value/text()'.format(xpath_base))
    if not len(value):
        if root.xpath(xpath_base):
            # Handle boolean attributes that are present, but have no value.
            return True
        return None

    return str(value[0])
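

# Return convention sketch (attribute name borrowed from the comment below;
# values are hypothetical): a missing attribute yields None, a present but
# value-less (boolean) attribute yields True, otherwise the first value as str.
#
#   attribute_value_load(apiurl, 'openSUSE:Factory', 'IgnoredIssues')
#   # -> None, True, or the attribute's first value as a string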


# New attributes must be defined manually before they can be used. Example:
# `osc api /attribute/OSRT/IgnoredIssues/_meta` outputs an existing definition.
#
# A new attribute can be created via:
# `osc api -T $xml /attribute/OSRT/$NEWATTRIBUTE/_meta`
#
# Remember to create it for both OBS and IBS as necessary.
def attribute_value_save(
    apiurl: str,
    project: str,
    name: str,
    value: str,
    namespace='OSRT',
    package: Optional[str] = None
):
    root = ET.Element('attributes')

    attribute = ET.SubElement(root, 'attribute')
    attribute.set('namespace', namespace)
    attribute.set('name', name)

    ET.SubElement(attribute, 'value').text = value

    # The OBS attribute API is super strange: POST to update.
    url = makeurl(apiurl, list(filter(None, ['source', project, package, '_attribute'])))
    try:
        http_POST(url, data=ET.tostring(root))
    except HTTPError as e:
        if e.code == 404:
            logging.error(f"Saving attribute {namespace}:{name} to {project} failed. You may need to create the type on your instance.")
        raise e


def attribute_value_delete(apiurl: str, project: str, name: str, namespace='OSRT', package: Optional[str] = None):
    http_DELETE(makeurl(
        apiurl, list(filter(None, ['source', project, package, '_attribute', namespace + ':' + name]))))


@memoize(session=True)
def repository_path_expand(apiurl: str, project: str, repo: str, visited_repos: Optional[set] = None):
    """Recursively list underlying projects."""
    if visited_repos is None:
        visited_repos = set()
    repos = [[project, repo]]
    meta = ET.fromstringlist(show_project_meta(apiurl, project))
    paths = meta.findall('.//repository[@name="{}"]/path'.format(repo))

    # The listed paths are taken as-is, except for the last one...
    for path in paths[:-1]:
        repos += [[path.get('project', project), path.get('repository')]]

    # ...which is expanded recursively
    if len(paths) > 0:
        p_project = paths[-1].get('project', project)
        p_repository = paths[-1].get('repository')
        if (p_project, p_repository) not in visited_repos:
            visited_repos.add((p_project, p_repository))
            repos += repository_path_expand(apiurl, p_project, p_repository, visited_repos)
    return repos
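

# Result shape sketch (hypothetical layering): the starting pair comes first,
# followed by the underlying [project, repository] pairs, with the last path
# element of each level expanded recursively.
#
#   repository_path_expand(apiurl, 'openSUSE:Factory:Staging:A', 'standard')
#   # -> [['openSUSE:Factory:Staging:A', 'standard'],
#   #     ['openSUSE:Factory', 'standard'], ...]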


@memoize(session=True)
def repository_path_search(apiurl, project, search_project, search_repository):
    queue = []

    # Initialize breadth first search queue with repositories from top project.
    root = ET.fromstringlist(show_project_meta(apiurl, project))
    for repository in root.xpath('repository[path[@project and @repository]]/@name'):
        queue.append((repository, project, repository))

    # Perform a breadth first search and return the first repository chain with
    # a series of path elements targeting search project and repository.
    for repository_top, project, repository in queue:
        if root.get('name') != project:
            # Repositories for a single project are in a row so cache parsing.
            root = ET.fromstringlist(show_project_meta(apiurl, project))

        paths = root.findall('repository[@name="{}"]/path'.format(repository))
        for path in paths:
            if path.get('project') == search_project and path.get('repository') == search_repository:
                return repository_top

            queue.append((repository_top, path.get('project'), path.get('repository')))

    return None


def repository_arch_state(apiurl, project, repository, arch):
    # just checking the mtimes of the repository's binaries
    url = makeurl(apiurl, ['build', project, repository, arch, '_repository'])
    from osclib.util import sha1_short
    try:
        return sha1_short(http_GET(url).read())
    except HTTPError as e:
        # E.g. staging projects inherit the project config from the 'ports'
        # repository, but that repository does not contain the archs we want;
        # as such it has no state.
        if e.code != 404:
            raise e


def repository_state(apiurl, project, repository, archs=[]):
    if not len(archs):
        archs = target_archs(apiurl, project, repository)

    # Unfortunately, the state hash reflects the published state and not the
    # binaries published in repository. As such request binary list and hash.
    combined_state = []
    for arch in archs:
        state = repository_arch_state(apiurl, project, repository, arch)
        if state:
            combined_state.append(state)
    from osclib.util import sha1_short
    return sha1_short(combined_state)


def repositories_states(apiurl, repository_pairs, archs=[]):
    states = []

    for project, repository in repository_pairs:
        state = repository_state(apiurl, project, repository, archs)
        if state:
            states.append(state)

    return states


def repository_published(apiurl, project, repository, archs=[]):
    # In a perfect world this would check for the existence of imports from i586
    # into x86_64, but in an even more perfect world OBS would show archs that
    # depend on another arch for imports as not completed until the dependent
    # arch completes. This is a simplified check that ensures x86_64 repos are
    # not indicated as published when i586 has not finished which is primarily
    # useful for repo_checker when only checking x86_64. The API treats archs as
    # a filter on what to return and thus non-existent archs do not cause an
    # issue nor alter the result.
    if 'x86_64' in archs and 'i586' not in archs:
        # Create a copy to avoid altering caller's list.
        archs = list(archs)
        archs.append('i586')

    root = ET.fromstringlist(show_results_meta(
        apiurl, project, multibuild=True, repository=[repository], arch=archs))
    return not len(root.xpath('result[@state!="published" and @state!="unpublished"]'))


def repositories_published(apiurl, repository_pairs, archs=[]):
    for project, repository in repository_pairs:
        if not repository_published(apiurl, project, repository, archs):
            return (project, repository)

    return True


def project_meta_revision(apiurl, project):
    root = ET.fromstringlist(get_commitlog(
        apiurl, project, '_project', None, format='xml', meta=True))
    return int(root.find('logentry').get('revision'))


def package_source_changed(apiurl, project, package):
    url = makeurl(apiurl, ['source', project, package, '_history'], {'limit': 1})
    root = ET.parse(http_GET(url)).getroot()
    return datetime.fromtimestamp(int(root.find('revision/time').text), timezone.utc).replace(tzinfo=None)


def package_source_age(apiurl, project, package):
    return datetime.utcnow() - package_source_changed(apiurl, project, package)


def entity_exists(apiurl, project, package=None):
    try:
        http_GET(makeurl(apiurl, list(filter(None, ['source', project, package])) + ['_meta']))
    except HTTPError as e:
        if e.code == 404:
            return False

        raise e

    return True


def package_kind(apiurl, project, package):
    if package.startswith('00') or package.startswith('_'):
        return 'meta'

    if ':' in package:
        return 'multibuild_subpackage'

    if package.startswith('patchinfo.'):
        return 'patchinfo'

    try:
        url = makeurl(apiurl, ['source', project, package, '_meta'])
        root = ET.parse(http_GET(url)).getroot()
    except HTTPError as e:
        if e.code == 404:
            return None

        raise e

    if root.find('releasename') is not None and root.find('releasename').text != package:
        return 'maintenance_update'

    # Some multispec subpackages do not have bcntsynctag, so check link.
    link = entity_source_link(apiurl, project, package)
    if link is not None and link.get('cicount') == 'copy':
        kind_target = package_kind(apiurl, project, link.get('package'))
        if kind_target != 'maintenance_update':
            # If a multispec subpackage was updated via a maintenance update the
            # proper link information is lost and it will be considered source.
            return 'multispec_subpackage'

    return 'source'


def entity_source_link(apiurl, project, package=None):
    try:
        if package:
            parts = ['source', project, package, '_link']
        else:
            parts = ['source', project, '_meta']
        url = makeurl(apiurl, parts)
        root = ET.parse(http_GET(url)).getroot()
    except HTTPError as e:
        if e.code == 404:
            return None

        raise e

    return root if package else root.find('link')


@memoize(session=True)
def package_source_link_copy(apiurl, project, package):
    link = entity_source_link(apiurl, project, package)
    return link is not None and link.get('cicount') == 'copy'


# Ideally, all package_source_hash* functions would operate on srcmd5, but
# unfortunately that is not practical for real use-cases. The srcmd5 includes
# service run information in addition to the presence of a link even if the
# expanded sources are identical. The verifymd5 sum excludes such information
# and only covers the sources (as should be the point), but looks at the link
# sources which means for projects like devel which link to the head revision of
# downstream all the verifymd5 sums are the same. This makes the summary md5s
# provided by OBS useless for comparing source and really anything. Instead the
# individual file md5s are used to generate a sha1 which is used for comparison.
# In the case of maintenance projects they are structured such that the updates
# are suffixed packages and the unsuffixed package is empty and only links to
# a specific suffixed package each revision. As such for maintenance projects
# the link must be expanded and is safe to do so. Additionally, projects that
# inherit packages need the same treatment (i.e. expanding) until they are
# overridden within the project.
@memoize(session=True)
def package_source_hash(apiurl, project, package, revision=None):
    query = {}
    if revision:
        query['rev'] = revision

    # Will not catch packages that previously had a link, but no longer do.
    if package_source_link_copy(apiurl, project, package):
        query['expand'] = 1

    try:
        url = makeurl(apiurl, ['source', project, package], query)
        root = ET.parse(http_GET(url)).getroot()
    except HTTPError as e:
        if e.code == 400 or e.code == 404:
            # 400: revision not found, 404: package not found.
            return None

        raise e

    if revision and root.find('error') is not None:
        # OBS returns XML error instead of HTTP 404 if revision not found.
        return None

    from osclib.util import sha1_short
    return sha1_short(root.xpath('entry[@name!="_link"]/@md5'))
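

# Comparison sketch tied to the commentary above (hypothetical projects): equal
# hashes mean the expanded sources are identical even when srcmd5 differs, so
# a submission can be skipped (as done in request_create_submit() below).
#
#   if package_source_hash(apiurl, devel_project, package) == \
#           package_source_hash(apiurl, target_project, package):
#       pass  # identical expanded sources; nothing to submit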


def package_source_hash_history(apiurl, project, package, limit=5, include_project_link=False):
    try:
        # get_commitlog() reverses the order so newest revisions are first.
        root = ET.fromstringlist(
            get_commitlog(apiurl, project, package, None, format='xml'))
    except HTTPError as e:
        if e.code == 404:
            return

        raise e

    if include_project_link:
        source_hashes = []

    source_md5s = root.xpath('logentry/@srcmd5')
    for source_md5 in source_md5s[:limit]:
        source_hash = package_source_hash(apiurl, project, package, source_md5)
        yield source_hash

        if include_project_link:
            source_hashes.append(source_hash)

    if include_project_link and (not limit or len(source_md5s) < limit):
        link = entity_source_link(apiurl, project)
        if link is None:
            return
        project = link.get('project')

        if limit:
            limit_remaining = limit - len(source_md5s)

        # Allow small margin for duplicates.
        for source_hash in package_source_hash_history(apiurl, project, package, None, True):
            if source_hash in source_hashes:
                continue

            yield source_hash

            if limit:
                limit_remaining -= 1
                if limit_remaining == 0:
                    break


def package_version(apiurl, project, package):
    try:
        url = makeurl(apiurl, ['source', project, package, '_history'], {'limit': 1})
        root = ET.parse(http_GET(url)).getroot()
    except HTTPError as e:
        if e.code == 404:
            return False

        raise e

    return str(root.xpath('(//version)[last()]/text()')[0])


def project_attribute_list(apiurl, attribute, locked=None):
    xpath = 'attribute/@name="{}"'.format(attribute)
    root = search(apiurl, 'project', xpath)
    for project in root.xpath('project/@name'):
        # Locked not exposed via OBS xpath engine.
        if locked is not None and project_locked(apiurl, project) != locked:
            continue

        yield project


# OBS xpath engine does not support multiple attribute queries nor negation. As
# such both must be done client-side.
def project_attributes_list(apiurl, attributes, attributes_not=None, locked=None):
    projects = set()

    for attribute in attributes:
        projects.update(project_attribute_list(apiurl, attribute, locked))

    for attribute in attributes_not or []:
        projects.difference_update(project_attribute_list(apiurl, attribute, locked))

    return list(projects)


@memoize(session=True)
def project_remote_list(apiurl):
    remotes = {}

    root = search(apiurl, 'project', 'starts-with(remoteurl, "http")')
    for project in root.findall('project'):
        # Strip trailing /public as the only use-case for manually checking
        # remote projects is to query them directly to use an API that does not
        # work over the interconnect. As such /public would have the same problem.
        remotes[project.get('name')] = re.sub('/public$', '', project.find('remoteurl').text)

    return remotes


def project_remote_apiurl(apiurl, project):
    remotes = project_remote_list(apiurl)
    for remote in remotes:
        if project.startswith(remote + ':'):
            return remotes[remote], project[len(remote) + 1:]

    return apiurl, project


def project_remote_prefixed(apiurl, apiurl_remote, project):
    if apiurl_remote == apiurl:
        return project

    remotes = project_remote_list(apiurl)
    for remote, remote_apiurl in remotes.items():
        if remote_apiurl == apiurl_remote:
            return remote + ':' + project

    raise Exception('remote APIURL interconnect not configured for {}'.format(apiurl_remote))
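

# Mapping sketch (hypothetical remote configuration): if this instance defines
# a remote 'openSUSE.org' whose remoteurl points at https://api.opensuse.org,
# then the project is returned with that remote's prefix prepended.
#
#   project_remote_prefixed(apiurl, 'https://api.opensuse.org', 'openSUSE:Factory')
#   # -> 'openSUSE.org:openSUSE:Factory'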


def review_find_last(request, user, states=['all']):
    for review in reversed(request.reviews):
        if review.by_user == user and ('all' in states or review.state in states):
            return review

    return None


def reviews_remaining(request, incident_psuedo=False):
    reviews = []
    for review in request.reviews:
        if review.state != 'accepted':
            reviews.append(review_short(review))

    if incident_psuedo:
        # Add review in the same style as the staging review used for non
        # maintenance projects to allow for the same wait on review.
        for action in request.actions:
            if action.type == 'maintenance_incident':
                reviews.append('maintenance_incident')
                break

    return reviews


def review_short(review):
    if review.by_user:
        return review.by_user
    if review.by_group:
        return review.by_group
    if review.by_project:
        if review.by_package:
            return '/'.join([review.by_project, review.by_package])
        return review.by_project

    return None


def issue_trackers(apiurl):
    url = makeurl(apiurl, ['issue_trackers'])
    root = ET.parse(http_GET(url)).getroot()
    trackers = {}
    for tracker in root.findall('issue-tracker'):
        trackers[tracker.find('name').text] = tracker.find('label').text
    return trackers


def issue_tracker_by_url(apiurl, tracker_url):
    url = makeurl(apiurl, ['issue_trackers'])
    root = ET.parse(http_GET(url)).getroot()
    if not tracker_url.endswith('/'):
        # All trackers are formatted with trailing slash.
        tracker_url += '/'
    return next(iter(root.xpath('issue-tracker[url[text()="{}"]]'.format(tracker_url)) or []), None)


def issue_tracker_label_apply(tracker, identifier):
    return tracker.find('label').text.replace('@@@', identifier)


def request_remote_identifier(apiurl, apiurl_remote, request_id):
    if apiurl_remote == apiurl:
        return 'request#{}'.format(request_id)

    # The URL differences make this rather convoluted.
    tracker = issue_tracker_by_url(apiurl, apiurl_remote.replace('api.', 'build.'))
    if tracker is not None:
        return issue_tracker_label_apply(tracker, request_id)

    return request_id


def duplicated_binaries_in_repo(apiurl, project, repository):
    duplicates = {}
    for arch in sorted(target_archs(apiurl, project, repository), reverse=True):
        package_binaries, _ = package_binary_list(
            apiurl, project, repository, arch,
            strip_multibuild=False, exclude_src_debug=True)
        binaries = {}
        for pb in package_binaries:
            if pb.arch != 'noarch' and pb.arch != arch:
                continue

            binaries.setdefault(arch, {})

            if pb.name in binaries[arch]:
                duplicates.setdefault(arch, {})
                duplicates[arch].setdefault(pb.name, set())
                duplicates[arch][pb.name].add(pb.package)
                duplicates[arch][pb.name].add(binaries[arch][pb.name])
                continue

            binaries[arch][pb.name] = pb.package

    # convert sets to lists for readable yaml
    for arch in duplicates.keys():
        for name in duplicates[arch].keys():
            duplicates[arch][name] = list(duplicates[arch][name])

    return duplicates
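

# Result shape sketch (hypothetical collision): arch -> binary name -> list of
# packages providing that binary, ready to be dumped as readable YAML.
#
#   {'x86_64': {'libfoo1': ['foo', 'foo-bootstrap']}}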


# osc.core.search() is over-complicated and does not return an lxml element.
def search(apiurl, path, xpath, query=None):
    query = dict(query or {})
    query['match'] = xpath
    url = makeurl(apiurl, ['search', path], query)
    return ET.parse(http_GET(url)).getroot()


def action_is_patchinfo(action):
    return (action.type == 'maintenance_incident' and (
        action.src_package == 'patchinfo' or action.src_package.startswith('patchinfo.')))


def request_action_key(action):
    identifier = []

    if action.type in ['add_role', 'change_devel', 'maintenance_release', 'set_bugowner', 'submit']:
        identifier.append(action.tgt_project)
        if action.tgt_package is not None:
            identifier.append(action.tgt_package)

        if action.type in ['add_role', 'set_bugowner']:
            if action.person_name is not None:
                identifier.append(action.person_name)
                if action.type == 'add_role':
                    identifier.append(action.person_role)
            else:
                identifier.append(action.group_name)
                if action.type == 'add_role':
                    identifier.append(action.group_role)
    elif action.type == 'delete':
        identifier.append(action.tgt_project)
        if action.tgt_package is not None:
            identifier.append(action.tgt_package)
        elif action.tgt_repository is not None:
            identifier.append(action.tgt_repository)
    elif action.type == 'maintenance_incident':
        if not action_is_patchinfo(action):
            identifier.append(action.tgt_releaseproject)
        identifier.append(action.src_package)

    return '::'.join(['/'.join(identifier), action.type])
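

# Key format sketch (hypothetical values): the identifier parts are joined by
# '/' and the action type is appended after '::'.
#
#   request_action_key(action)  # e.g. 'openSUSE:Factory/curl::submit'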


def request_action_list_maintenance_incident(apiurl, project, package, states=['new', 'review']):
    # The maintenance workflow seems to be designed to be as difficult to find
    # requests as possible. As such, in order to find incidents for a given
    # target project one must search for the requests in two states: before and
    # after being assigned to an incident project. Additionally, one must search
    # the "maintenance projects" denoted by an attribute instead of the actual
    # target project. To make matters worse the actual target project of the
    # request is not accessible via search (i.e. action/target/releaseproject)
    # so it must be checked client-side. Lastly, since multiple actions are also
    # designed completely wrong one must loop over the actions and recheck the
    # search parameters to figure out which action caused the request to be
    # included in the search results. Overall, another prime example of design
    # done completely and utterly wrong.

    package_repository = '{}.{}'.format(package, project.replace(':', '_'))

    # Loop over all maintenance projects and create selectors for the two
    # request states for the given project.
    xpath = ''
    for maintenance_project in project_attribute_list(apiurl, 'OBS:MaintenanceProject'):
        xpath_project = ''

        # Before being assigned to an incident.
        xpath_project = xpath_join(xpath_project, 'action/target/@project="{}"'.format(
            maintenance_project))

        xpath_project_package = ''
        xpath_project_package = xpath_join(
            xpath_project_package, 'action/source/@package="{}"'.format(package))
        xpath_project_package = xpath_join(
            xpath_project_package, 'action/source/@package="{}"'.format(
                package_repository), op='or', inner=True)

        xpath_project = xpath_join(xpath_project, f'({xpath_project_package})', op='and', inner=True)

        xpath = xpath_join(xpath, xpath_project, op='or', nexpr_parentheses=True)
        xpath_project = ''

        # After being assigned to an incident.
        xpath_project = xpath_join(xpath_project, 'starts-with(action/target/@project,"{}:")'.format(
            maintenance_project))
        xpath_project = xpath_join(xpath_project, 'action/target/@package="{}"'.format(
            package_repository), op='and', inner=True)

        xpath = xpath_join(xpath, xpath_project, op='or', nexpr_parentheses=True)

    xpath = '({})'.format(xpath)

    if 'all' not in states:
        xpath_states = ''
        for state in states:
            xpath_states = xpath_join(xpath_states, 'state/@name="{}"'.format(state), inner=True)
        xpath = xpath_join(xpath, xpath_states, op='and', nexpr_parentheses=True)

    xpath = xpath_join(xpath, 'action/@type="maintenance_incident"', op='and')

    root = search(apiurl, 'request', xpath)
    for request_element in root.findall('request'):
        request = Request()
        request.read(request_element)

        for action in request.actions:
            if action.type == 'maintenance_incident' and action.tgt_releaseproject == project and (
                (action.tgt_package is None and
                 (action.src_package == package or action.src_package == package_repository)) or
                    (action.tgt_package == package_repository)):
                yield request, action
                break
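

# Naming sketch for the maintenance request searches here (hypothetical
# target): the source package is the package name suffixed with the target
# project, with colons replaced by underscores.
#
#   '{}.{}'.format('curl', 'openSUSE:Leap:15.2:Update'.replace(':', '_'))
#   # -> 'curl.openSUSE_Leap_15.2_Update'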
|
|
|
|
|
2022-02-18 17:15:48 +01:00
|
|
|
|
2019-07-11 16:24:27 -05:00
|
|
|
def request_action_list_maintenance_release(apiurl, project, package, states=['new', 'review']):
|
|
|
|
package_repository = '{}.{}'.format(package, project.replace(':', '_'))
|
|
|
|
|
|
|
|
xpath = 'action/target/@project="{}"'.format(project)
|
|
|
|
xpath = xpath_join(xpath, 'action/source/@package="{}"'.format(package_repository), op='and', inner=True)
|
|
|
|
xpath = '({})'.format(xpath)
|
|
|
|
|
2021-09-21 14:20:08 +02:00
|
|
|
if 'all' not in states:
|
2019-07-11 16:24:27 -05:00
|
|
|
xpath_states = ''
|
|
|
|
for state in states:
|
|
|
|
xpath_states = xpath_join(xpath_states, 'state/@name="{}"'.format(state), inner=True)
|
|
|
|
xpath = xpath_join(xpath, xpath_states, op='and', nexpr_parentheses=True)
|
|
|
|
|
|
|
|
xpath = xpath_join(xpath, 'action/@type="maintenance_release"', op='and')
|
|
|
|
|
|
|
|
root = search(apiurl, 'request', xpath)
|
|
|
|
for request_element in root.findall('request'):
|
|
|
|
request = Request()
|
|
|
|
request.read(request_element)
|
|
|
|
|
|
|
|
for action in request.actions:
|
|
|
|
if (action.type == 'maintenance_release' and
|
2022-02-18 15:13:50 +01:00
|
|
|
action.tgt_project == project and action.src_package == package_repository):
|
2019-07-11 16:24:27 -05:00
|
|
|
yield request, action
|
|
|
|
break
|
|
|
|
|
2022-02-18 17:15:48 +01:00
|
|
|
|
2019-09-25 14:40:56 -05:00
|
|
|
def request_action_simple_list(apiurl, project, package, states, request_type):
|
2019-09-25 14:35:25 -05:00
|
|
|
# Disable including source project in get_request_list() query.
|
|
|
|
before = conf.config['include_request_from_project']
|
|
|
|
conf.config['include_request_from_project'] = False
|
2023-01-25 16:42:42 +01:00
|
|
|
requests = get_request_list_with_history(apiurl, project, package, None, states, request_type)
|
2019-09-25 14:35:25 -05:00
|
|
|
conf.config['include_request_from_project'] = before
|
|
|
|
|
|
|
|
for request in requests:
|
2019-09-25 14:37:24 -05:00
|
|
|
for action in request.actions:
|
|
|
|
if action.tgt_project == project and action.tgt_package == package:
|
|
|
|
yield request, action
|
|
|
|
break
|
2019-07-11 16:24:27 -05:00
|
|
|
|
2022-02-18 17:15:48 +01:00
|
|
|
|
2019-07-11 16:24:27 -05:00
|
|
|
def request_action_list(apiurl, project, package, states=['new', 'review'], types=['submit']):
    for request_type in types:
        if request_type == 'maintenance_incident':
            yield from request_action_list_maintenance_incident(apiurl, project, package, states)
        elif request_type == 'maintenance_release':
            yield from request_action_list_maintenance_release(apiurl, project, package, states)
        else:
            yield from request_action_simple_list(apiurl, project, package, states, request_type)


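# Illustrative sketch, not part of the original module: how the dispatcher
# above is typically consumed. `apiurl` and the project/package names are
# placeholder assumptions.
def _example_request_action_list(apiurl):
    """Print pending submit and delete actions for a hypothetical package."""
    for request, action in request_action_list(
            apiurl, 'openSUSE:Factory', 'foo', types=['submit', 'delete']):
        print('{}: {}'.format(request.reqid, action.type))

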
def request_action_list_source(apiurl, project, package, states=['new', 'review'], include_release=False):
    types = []
    if attribute_value_load(apiurl, project, 'Maintained', 'OBS'):
        types.append('maintenance_incident')
        if include_release:
            types.append('maintenance_release')
    else:
        types.append('submit')

    yield from request_action_list(apiurl, project, package, states, types)


def request_create_submit(apiurl, source_project, source_package,
                          target_project, target_package=None, message=None, revision=None,
                          ignore_if_any_request=False, supersede=True, frequency=None):
    """
    ignore_if_any_request: ignore source changes and do not submit if any prior
    requests against the target exist.
    """

    if not target_package:
        target_package = source_package

    source_hash = package_source_hash(apiurl, target_project, target_package)
    source_hash_consider = package_source_hash(apiurl, source_project, source_package, revision)
    if source_hash_consider == source_hash:
        # No sense submitting identical sources.
        return False

    for request, action in request_action_list(
            apiurl, target_project, target_package, REQUEST_STATES_MINUS_ACCEPTED, ['submit']):
        if ignore_if_any_request:
            return False
        if not supersede and request.state.name in ('new', 'review'):
            return False
        if frequency and request_age(request).total_seconds() < frequency:
            return False

        source_hash_pending = package_source_hash(
            apiurl, action.src_project, action.src_package, action.src_rev)
        if source_hash_pending == source_hash_consider:
            # Pending request with identical sources.
            return False

    message = message_suffix('created', message)

    def create_function():
        return create_submit_request(apiurl, source_project, source_package,
                                     target_project, target_package,
                                     message=message, orev=revision)

    return RequestFuture('submit {}/{} -> {}/{}'.format(
        source_project, source_package, target_project, target_package), create_function)


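# Illustrative sketch with placeholder names: request_create_submit() returns
# False when there is nothing to submit, otherwise a RequestFuture that can be
# previewed before the request is actually created.
def _example_request_create_submit(apiurl):
    """Dry-run a submit request, at most one per day, for a hypothetical package."""
    future = request_create_submit(apiurl, 'devel:languages:python', 'python-foo',
                                   'openSUSE:Factory', frequency=24 * 60 * 60)
    if future:
        future.print_and_create(dry=True)

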
def request_create_delete(apiurl, target_project, target_package, message=None):
    for request, action in request_action_list(
            apiurl, target_project, target_package, REQUEST_STATES_MINUS_ACCEPTED, ['delete']):
        return False

    # No proper API function to perform the same operation.
    message = message_suffix('created', message)

    def create_function():
        return create_delete_request(apiurl, target_project, target_package, message)

    return RequestFuture('delete {}/{}'.format(target_project, target_package), create_function)


def request_create_change_devel(apiurl, source_project, source_package,
                                target_project, target_package=None, message=None):
    if not target_package:
        target_package = source_package

    for request, action in request_action_list(
            apiurl, target_project, target_package, REQUEST_STATES_MINUS_ACCEPTED, ['change_devel']):
        return False

    message = message_suffix('created', message)

    def create_function():
        return create_change_devel_request(apiurl, source_project, source_package,
                                           target_project, target_package, message)

    return RequestFuture('change_devel {}/{} -> {}/{}'.format(
        source_project, source_package, target_project, target_package), create_function)


def create_delete_request(apiurl, target_project, target_package=None, message=None):
    """Create a delete request"""

    action = Action('delete', tgt_project=target_project, tgt_package=target_package)
    return create_request(apiurl, action, message)


def create_change_devel_request(apiurl, source_project, source_package,
                                target_project, target_package=None, message=None):
    """Create a change_devel request"""

    action = Action('change_devel', src_project=source_project, src_package=source_package,
                    tgt_project=target_project, tgt_package=target_package)
    return create_request(apiurl, action, message)


def create_add_role_request(apiurl, target_project, user, role, target_package=None, message=None):
    """Create an add_role request

    user -- user or group name. If it is a group, it has to start with 'group:'.
    """

    if user.startswith('group:'):
        group = user.replace('group:', '')
        kargs = dict(group_name=group, group_role=role)
    else:
        kargs = dict(person_name=user, person_role=role)

    action = Action('add_role', tgt_project=target_project, tgt_package=target_package, **kargs)
    return create_request(apiurl, action, message)


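# Illustrative sketch with placeholder names: the 'group:' prefix selects the
# group variant of the action built above.
def _example_create_add_role_request(apiurl):
    """Request maintainer role for a user and for a group on a hypothetical project."""
    create_add_role_request(apiurl, 'openSUSE:Factory', 'some_user', 'maintainer')
    create_add_role_request(apiurl, 'openSUSE:Factory', 'group:factory-maintainers', 'maintainer')

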
def create_set_bugowner_request(apiurl, target_project, user, target_package=None, message=None):
    """Create a set_bugowner request

    user -- user or group name. If it is a group, it has to start with 'group:'.
    """

    if user.startswith('group:'):
        group = user.replace('group:', '')
        kargs = dict(group_name=group)
    else:
        kargs = dict(person_name=user)

    action = Action('set_bugowner', tgt_project=target_project, tgt_package=target_package, **kargs)
    return create_request(apiurl, action, message)


def create_request(apiurl, action, message=None):
    """Create a request for the given action

    If no message is given, one is generated (see add_description)
    """

    r = Request()
    r.actions.append(action)
    add_description(r, message)

    r.create(apiurl)
    return r.reqid


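# Illustrative sketch with placeholder names: create_request() accepts any
# osc.core.Action, so one-off request types can be composed directly.
def _example_create_request(apiurl):
    """Create a raw delete request for a hypothetical package."""
    action = Action('delete', tgt_project='home:some_user', tgt_package='foo')
    return create_request(apiurl, action)

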
class RequestFuture:
    def __init__(self, description, create_function):
        self.description = description
        self.create_function = create_function

    def create(self):
        return self.create_function()

    def create_tolerant(self):
        try:
            return self.create()
        except HTTPError:
            traceback.print_exc()

        return False

    def print_and_create(self, dry=False):
        if dry:
            print(self)
            return None

        request_id = self.create_tolerant()
        print('{} = {}'.format(self, request_id))
        return request_id

    def __str__(self):
        return self.description


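# Illustrative sketch: RequestFuture defers creation behind a zero-argument
# callable, so anything can be wrapped (the lambda stands in for a real
# create function and simply returns a fake request id).
def _example_request_future():
    """Preview a wrapped request without creating it."""
    future = RequestFuture('submit home:some_user/foo -> openSUSE:Factory/foo',
                           lambda: '12345')
    return future.print_and_create(dry=True)

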
def add_description(request, text=None):
    """Add a description to the given request.

    If a text is given, that is used as description. Otherwise a generic text is generated.
    """

    if not text:
        text = message_suffix('created')
    request.description = text


def message_suffix(action, message=None):
    if not message:
        message = '{} by OSRT tools'.format(action)

    message += ' (host {})'.format(socket.gethostname())
    return message


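# For reference, a generated message looks like the following (the hostname is
# an example):
#
#     >>> message_suffix('created')
#     'created by OSRT tools (host buildhost.example.org)'

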
def request_state_change(apiurl, request_id, state):
    query = {'newstate': state, 'cmd': 'changestate'}
    url = makeurl(apiurl, ['request', request_id], query)
    return ET.parse(http_POST(url)).getroot().get('code')
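

# Illustrative sketch with a placeholder request id: on success the OBS status
# element typically carries code="ok".
def _example_request_state_change(apiurl):
    """Revoke a hypothetical request and return the API status code."""
    return request_state_change(apiurl, '123456', 'revoked')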