2017-06-22 23:16:27 -05:00
|
|
|
from collections import namedtuple
|
2017-06-22 23:14:31 -05:00
|
|
|
from datetime import datetime
|
2018-01-25 21:26:27 -06:00
|
|
|
from dateutil.parser import parse as date_parse
|
2017-06-22 23:16:27 -05:00
|
|
|
import re
|
2017-06-14 23:50:28 -05:00
|
|
|
from xml.etree import cElementTree as ET
|
2018-02-05 19:43:23 -06:00
|
|
|
from lxml import etree as ETL
|
2018-01-17 18:02:37 -06:00
|
|
|
from urllib2 import HTTPError
|
2017-06-14 23:50:28 -05:00
|
|
|
|
2017-06-22 23:16:27 -05:00
|
|
|
from osc.core import get_binarylist
|
2017-06-20 16:37:16 -05:00
|
|
|
from osc.core import get_dependson
|
2017-06-14 23:50:28 -05:00
|
|
|
from osc.core import http_GET
|
|
|
|
from osc.core import makeurl
|
2017-06-22 23:12:45 -05:00
|
|
|
from osc.core import owner
|
2018-01-29 03:14:24 -06:00
|
|
|
from osc.core import Request
|
2018-01-17 18:02:37 -06:00
|
|
|
from osc.core import show_package_meta
|
2017-06-14 23:50:57 -05:00
|
|
|
from osc.core import show_project_meta
|
2017-06-14 23:50:28 -05:00
|
|
|
from osclib.memoize import memoize
|
|
|
|
|
2018-04-24 17:20:39 -05:00
|
|
|
# Parses a binary filename (optionally prefixed with a multibuild flavor,
# e.g. "pkg:flavor::") into filename, name, version, release, and arch.
BINARY_REGEX = r'(?:.*::)?(?P<filename>(?P<name>.*)-(?P<version>[^-]+)-(?P<release>[^-]+)\.(?P<arch>[^-\.]+))'
# Raw string: '\.' in a plain string is an invalid escape sequence
# (DeprecationWarning today, a SyntaxError in future Python versions).
RPM_REGEX = BINARY_REGEX + r'\.rpm'

# Lightweight record describing a parsed binary RPM.
BinaryParsed = namedtuple('BinaryParsed', ('package', 'filename', 'name', 'arch'))
|
2017-06-22 23:16:27 -05:00
|
|
|
|
2018-03-05 17:17:35 -06:00
|
|
|
@memoize(session=True)
def group_members(apiurl, group, maintainers=False):
    """Return the userids belonging to *group*.

    With maintainers=True the group's maintainer userids are returned
    instead of the regular member userids.
    """
    url = makeurl(apiurl, ['group', group])
    root = ETL.parse(http_GET(url)).getroot()

    xpath = 'maintainer/@userid' if maintainers else 'person/person/@userid'
    return root.xpath(xpath)
|
2017-05-11 23:31:27 -05:00
|
|
|
|
|
|
|
@memoize(session=True)
def owner_fallback(apiurl, project, package):
    """Search for the owner of *package* within *project*, falling back
    to the global (ex. Factory) owner search when no project-specific
    owner applies.
    """
    root = owner(apiurl, package, project=project)
    entry = root.find('owner')
    # NOTE: `not entry` intentionally relies on ElementTree truthiness — it
    # covers both a missing <owner> element and one without children.
    needs_fallback = not entry or project.startswith(entry.get('project'))
    if needs_fallback:
        # Fallback to global (ex Factory) maintainer.
        root = owner(apiurl, package)
    return root
|
|
|
|
|
2017-06-20 16:34:35 -05:00
|
|
|
@memoize(session=True)
def maintainers_get(apiurl, project, package=None):
    """Return the maintainer userids for a project or package.

    Without *package* the project meta is consulted directly. With a
    package the owner search (with Factory fallback) is used and, when no
    individual maintainers are found, maintainer groups are expanded into
    their member userids.
    """
    if package is None:
        meta = ET.fromstring(''.join(show_project_meta(apiurl, project)))
        return [p.get('userid') for p in meta.findall('.//person')
                if p.get('role') == 'maintainer']

    root = owner_fallback(apiurl, project, package)

    def _maintainer_names(nodes):
        # Filter owner-search result nodes down to maintainer names.
        return [n.get('name') for n in nodes if n.get('role') == 'maintainer']

    maintainers = _maintainer_names(root.findall('.//person'))
    if not maintainers:
        for group in _maintainer_names(root.findall('.//group')):
            maintainers += group_members(apiurl, group)
    return maintainers
|
|
|
|
|
2017-06-14 23:50:28 -05:00
|
|
|
@memoize(session=True)
def package_list(apiurl, project):
    """Return the sorted names of all (expanded) packages in *project*."""
    url = makeurl(apiurl, ['source', project], { 'expand': 1 })
    root = ET.parse(http_GET(url)).getroot()

    return sorted(entry.get('name') for entry in root.findall('entry'))
|
2017-06-14 23:50:57 -05:00
|
|
|
|
|
|
|
@memoize(session=True)
def target_archs(apiurl, project):
    """Return the architectures of *project*'s standard repository."""
    meta = ET.fromstring(''.join(show_project_meta(apiurl, project)))
    return [arch.text for arch in
            meta.findall('repository[@name="standard"]/arch')]
|
2017-06-20 16:37:16 -05:00
|
|
|
|
|
|
|
@memoize(session=True)
def depends_on(apiurl, project, repository, packages=None, reverse=None):
    """Return the set of packages that *packages* depend on (or, with
    *reverse*, that depend on them), aggregated over all standard
    architectures of *project*.
    """
    dependencies = set()
    for arch in target_archs(apiurl, project):
        result = get_dependson(apiurl, project, repository, arch, packages, reverse)
        for pkgdep in ET.fromstring(result).findall('.//pkgdep'):
            dependencies.add(pkgdep.text)

    return dependencies
|
2017-06-22 23:14:31 -05:00
|
|
|
|
|
|
|
def request_when_staged(request, project, first=False):
    """Return the datetime at which *request* was staged in *project*.

    Scans the request's state history for entries whose comment mentions
    the staging project. By default the most recent match wins; with
    first=True the earliest match is returned. (Previously the *first*
    parameter was accepted but silently ignored.)

    Raises TypeError (from date_parse(None)) if the request was never
    staged in *project* — unchanged from the original behavior.
    """
    when = None
    for history in request.statehistory:
        if project in history.comment:
            when = history.when
            if first:
                # Caller asked for the earliest staging event.
                break

    return date_parse(when)
|
2017-06-22 23:14:31 -05:00
|
|
|
|
|
|
|
def request_staged(request):
    """Return the staging project in which *request* is currently staged,
    or None when it is not staged (or was staged less than ten minutes ago).
    """
    target = request.actions[0].tgt_project
    for review in request.reviews:
        if review.state != 'new' or not review.by_project:
            continue
        if not review.by_project.startswith(target):
            continue

        # Allow time for things to settle.
        when = request_when_staged(request, review.by_project)
        if (datetime.utcnow() - when).total_seconds() > 10 * 60:
            return review.by_project

    return None
|
2017-06-22 23:16:27 -05:00
|
|
|
|
|
|
|
def binary_list(apiurl, project, repository, arch, package=None):
    """Return BinaryParsed entries for the binary RPMs in a repository,
    skipping source, debuginfo, and debugsource packages.
    """
    parsed = []
    for binary in get_binarylist(apiurl, project, repository, arch, package):
        match = re.match(RPM_REGEX, binary)
        if not match:
            continue

        name = match.group('name')
        # Debug packages are of no interest to callers of this helper.
        if name.endswith(('-debuginfo', '-debuginfo-32bit', '-debugsource')):
            continue
        # Neither are source RPMs.
        if match.group('arch') == 'src':
            continue

        parsed.append(BinaryParsed(package, match.group('filename'),
                                   name, match.group('arch')))

    return parsed
|
2017-08-02 21:05:21 -05:00
|
|
|
|
2017-08-23 17:03:59 -05:00
|
|
|
@memoize(session=True)
def package_binary_list(apiurl, project, repository, arch, package=None, strip_multibuild=True, exclude_src_debug=False):
    """Return (package_binaries, binary_map) for a repository.

    package_binaries is a list of BinaryParsed entries; binary_map maps a
    binary filename to the package that produced it (last duplicate wins).
    Multibuild flavor suffixes are stripped from package names unless
    strip_multibuild=False; source/debug binaries can be dropped via
    exclude_src_debug.
    """
    path = ['build', project, repository, arch]
    if package:
        path.append(package)
    url = makeurl(apiurl, path, {'view': 'binaryversions'})
    root = ET.parse(http_GET(url)).getroot()

    package_binaries = []
    binary_map = {}  # last duplicate wins
    for binaries in root:
        owning_package = binaries.get('package')
        if strip_multibuild:
            # Drop the multibuild flavor suffix ("pkg:flavor" -> "pkg").
            owning_package = owning_package.split(':', 1)[0]

        for entry in binaries:
            match = re.match(RPM_REGEX, entry.get('name'))
            if not match:
                continue

            parsed = BinaryParsed(owning_package, match.group('filename'),
                                  match.group('name'), match.group('arch'))
            if exclude_src_debug and binary_src_debug(parsed):
                continue

            package_binaries.append(parsed)
            binary_map[match.group('filename')] = owning_package

    return package_binaries, binary_map
|
2018-01-17 18:02:37 -06:00
|
|
|
|
2018-04-23 18:54:46 -05:00
|
|
|
def binary_src_debug(binary):
    """Return True if *binary* is a source, debuginfo, or debugsource RPM."""
    if binary.arch == 'src':
        return True
    return binary.name.endswith(('-debuginfo', '-debugsource'))
|
|
|
|
|
2018-01-17 18:02:37 -06:00
|
|
|
@memoize(session=True)
def devel_project_get(apiurl, target_project, target_package):
    """Return (devel_project, devel_package) from the package meta, or
    (None, None) when no devel element exists or the package is missing.
    """
    try:
        meta = ET.fromstring(''.join(show_package_meta(apiurl, target_project, target_package)))
        devel = meta.find('devel')
        if devel is not None:
            return devel.get('project'), devel.get('package')
    except HTTPError as e:
        # A missing package simply means there is no devel project.
        if e.code != 404:
            raise e

    return None, None
|
2018-01-17 18:04:47 -06:00
|
|
|
|
|
|
|
@memoize(session=True)
def devel_project_fallback(apiurl, target_project, target_package):
    """Like devel_project_get(), but fall back to the openSUSE:Factory
    devel project when the target project declares none itself.
    """
    project, package = devel_project_get(apiurl, target_project, target_package)
    if project is not None or target_project == 'openSUSE:Factory':
        return project, package

    if target_project.startswith('openSUSE:'):
        project, package = devel_project_get(apiurl, 'openSUSE:Factory', target_package)
    elif target_project.startswith('SUSE:'):
        # For SLE (assume IBS), fallback to openSUSE:Factory devel projects.
        project, package = devel_project_get(apiurl, 'openSUSE.org:openSUSE:Factory', target_package)
        if project:
            # Strip the openSUSE.org: interconnect prefix since it is not
            # part of the local project name.
            project = project.split(':', 1)[1]

    return project, package
|
2018-01-25 21:26:27 -06:00
|
|
|
|
|
|
|
def request_age(request):
    """Return the timedelta since *request* was created.

    Accepts either an osc Request object or an XML element from a request
    collection search result.
    """
    if isinstance(request, Request):
        created = request.statehistory[0].when
    else:
        created = request.find('history').get('when')
    return datetime.utcnow() - date_parse(created)
|
2018-02-05 19:43:23 -06:00
|
|
|
|
|
|
|
def project_list_prefix(apiurl, prefix):
    """Return the names of all projects whose name starts with *prefix*."""
    match = 'starts-with(@name, "{}")'.format(prefix)
    url = makeurl(apiurl, ['search', 'project', 'id'], {'match': match})
    root = ETL.parse(http_GET(url)).getroot()
    return root.xpath('project/@name')
|
2018-03-24 13:34:44 +01:00
|
|
|
|
|
|
|
#
# Dependency helpers
#
|
|
|
|
def fileinfo_ext_all(apiurl, project, repo, arch, package):
    """Yield the fileinfo_ext XML for every binary RPM built by *package*."""
    url = makeurl(apiurl, ['build', project, repo, arch, package])
    root = ET.parse(http_GET(url)).getroot()
    for binary in root.findall('binary'):
        filename = binary.get('filename')
        # Only RPMs carry the extended fileinfo view.
        if filename.endswith('.rpm'):
            yield fileinfo_ext(apiurl, project, repo, arch, package, filename)
|
|
|
|
|
|
|
|
def fileinfo_ext(apiurl, project, repo, arch, package, filename):
    """Return the fileinfo_ext view XML for a single binary file."""
    path = ['build', project, repo, arch, package, filename]
    url = makeurl(apiurl, path, {'view': 'fileinfo_ext'})
    return ET.parse(http_GET(url)).getroot()
|
2018-04-10 22:28:48 -05:00
|
|
|
|
|
|
|
def entity_email(apiurl, key, entity_type='person', include_name=False):
    """Return the email address of a person or group.

    With include_name=True and a recorded real name the result is in
    'Real Name <email>' form. Returns None when no email is recorded.
    """
    root = ET.parse(http_GET(makeurl(apiurl, [entity_type, key]))).getroot()

    email_node = root.find('email')
    if email_node is None:
        return None
    email = email_node.text

    realname = root.find('realname')
    if include_name and realname is not None:
        return '{} <{}>'.format(realname.text, email)

    return email
|
2018-04-13 17:08:08 -05:00
|
|
|
|
|
|
|
def source_file_load(apiurl, project, package, filename, revision=None):
    """Return the contents of a source file, or None when unavailable."""
    query = {'expand': 1}
    if revision:
        query['rev'] = revision
    url = makeurl(apiurl, ['source', project, package, filename], query)
    try:
        return http_GET(url).read()
    except HTTPError:
        # Best-effort: missing file or access problem yields None.
        return None
|
2018-04-17 17:18:44 -05:00
|
|
|
|
|
|
|
# Should be an API call that says give me "real" packages that does not include
# multibuild entries nor linked packages.
def package_list_without_links(apiurl, project):
    """Return package names in *project*, excluding multibuild flavors
    and packages that are links within the same project.
    """
    url = makeurl(apiurl, ['source', project],
                  {'view': 'info', 'nofilename': '1'})
    root = ETL.parse(http_GET(url)).getroot()
    return root.xpath(
        '//sourceinfo[not(./linked[@project="{}"]) and not(contains(@package, ":"))]/@package'.format(project))
|