Merge pull request #1869 from jberry-suse/origin-manager
origin-manager: core implementation (replace leaper/manager_42)
This commit is contained in:
commit
d43faf0b71
@ -47,6 +47,7 @@ matrix:
|
||||
- pip install flake8
|
||||
script:
|
||||
- flake8
|
||||
- ./dist/ci/flake-extra
|
||||
- env: TEST_SUITE=nosetests
|
||||
sudo: required
|
||||
services:
|
||||
|
4
dist/ci/flake-extra
vendored
Executable file
4
dist/ci/flake-extra
vendored
Executable file
@ -0,0 +1,4 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Should never contain references to products.
|
||||
! grep -iP 'leap|factory|sle' origin-manager.py osclib/origin.py
|
5
dist/obs/OSRT:OriginConfig.xml
vendored
Normal file
5
dist/obs/OSRT:OriginConfig.xml
vendored
Normal file
@ -0,0 +1,5 @@
|
||||
<definition name="OriginConfig" namespace="OSRT">
|
||||
<description>OriginManager configuration</description>
|
||||
<count>1</count>
|
||||
<modifiable_by role="maintainer"/>
|
||||
</definition>
|
24
dist/package/openSUSE-release-tools.spec
vendored
24
dist/package/openSUSE-release-tools.spec
vendored
@ -188,6 +188,16 @@ Requires: xz
|
||||
%description metrics-access
|
||||
Ingest download.o.o Apache access logs and generate metrics.
|
||||
|
||||
%package origin-manager
|
||||
Summary: Package origin management tools
|
||||
Group: Development/Tools/Other
|
||||
BuildArch: noarch
|
||||
Requires: osclib = %{version}
|
||||
Requires(pre): shadow
|
||||
|
||||
%description origin-manager
|
||||
Tools for managing the origin of package sources and keeping them in sync.
|
||||
|
||||
%package repo-checker
|
||||
Summary: Repository checker service
|
||||
Group: Development/Tools/Other
|
||||
@ -391,6 +401,14 @@ if [ -x /usr/bin/systemctl ] && /usr/bin/systemctl is-enabled osrt-obs-operator
|
||||
/usr/bin/systemctl try-restart --no-block osrt-obs-operator
|
||||
fi
|
||||
|
||||
%pre origin-manager
|
||||
getent passwd osrt-origin-manager > /dev/null || \
|
||||
useradd -r -m -s /sbin/nologin -c "user for openSUSE-release-tools-origin-manager" osrt-origin-manager
|
||||
exit 0
|
||||
|
||||
%postun origin-manager
|
||||
%systemd_postun
|
||||
|
||||
%pre repo-checker
|
||||
getent passwd osrt-repo-checker > /dev/null || \
|
||||
useradd -r -m -s /sbin/nologin -c "user for openSUSE-release-tools-repo-checker" osrt-repo-checker
|
||||
@ -468,6 +486,7 @@ exit 0
|
||||
%exclude %{_datadir}/%{source_dir}/metrics
|
||||
%exclude %{_datadir}/%{source_dir}/metrics.py
|
||||
%exclude %{_datadir}/%{source_dir}/metrics_release.py
|
||||
%exclude %{_datadir}/%{source_dir}/origin-manager.py
|
||||
%exclude %{_bindir}/osrt-staging-report
|
||||
%exclude %{_datadir}/%{source_dir}/pkglistgen
|
||||
%exclude %{_datadir}/%{source_dir}/pkglistgen.py
|
||||
@ -573,6 +592,11 @@ exit 0
|
||||
%{_bindir}/osrt-obs_operator
|
||||
%{_unitdir}/osrt-obs-operator.service
|
||||
|
||||
%files origin-manager
|
||||
%{_bindir}/osrt-origin-manager
|
||||
%{_unitdir}/osrt-origin-manager.service
|
||||
%{_unitdir}/osrt-origin-manager.timer
|
||||
|
||||
%files repo-checker
|
||||
%defattr(-,root,root,-)
|
||||
%{_bindir}/osrt-repo_checker
|
||||
|
99
origin-manager.py
Executable file
99
origin-manager.py
Executable file
@ -0,0 +1,99 @@
|
||||
#!/usr/bin/python
|
||||
|
||||
from osclib.core import package_source_hash
|
||||
from osclib.origin import origin_annotation_dump
|
||||
from osclib.origin import config_load
|
||||
from osclib.origin import origin_find
|
||||
from osclib.origin import policy_evaluate
|
||||
import ReviewBot
|
||||
import sys
|
||||
|
||||
|
||||
class OriginManager(ReviewBot.ReviewBot):
    """ReviewBot that validates the origin of submitted package sources."""

    def __init__(self, *args, **kwargs):
        ReviewBot.ReviewBot.__init__(self, *args, **kwargs)

        # ReviewBot options.
        self.request_default_return = True
        # No such thing as override, only changing origin which must be approved
        # by fallback group. Annotation must be included in review.
        self.override_allow = False

    def check_source_submission(self, src_project, src_package, src_rev, tgt_project, tgt_package):
        """Evaluate a submit action against the target project's origin policy.

        Returns True/False to accept/decline, or None to wait (per
        policy_result_handle).
        """
        if not self.config_validate(tgt_project):
            return False

        # Origin of the incoming sources.
        source_hash_new = package_source_hash(self.apiurl, src_project, src_package, src_rev)
        origin_info_new = origin_find(self.apiurl, tgt_project, tgt_package, source_hash_new)

        # Origin of the sources currently in the target project (current=True).
        source_hash_old = package_source_hash(self.apiurl, tgt_project, tgt_package)
        origin_info_old = origin_find(self.apiurl, tgt_project, tgt_package, source_hash_old, True)

        result = policy_evaluate(self.apiurl, tgt_project, tgt_package,
                                 origin_info_new, origin_info_old,
                                 source_hash_new, source_hash_old)
        return self.policy_result_handle(tgt_project, tgt_package, origin_info_new, origin_info_old, result)

    def config_validate(self, target_project):
        """Ensure the target project has a usable OSRT:OriginConfig attribute.

        Declines the review when the attribute or its fallback-group is
        missing; only warns when the configured review-user differs from the
        bot's user.
        """
        config = config_load(self.apiurl, target_project)
        if not config:
            self.review_messages['declined'] = 'OSRT:OriginConfig attribute missing'
            return False
        if not config.get('fallback-group'):
            self.review_messages['declined'] = 'OSRT:OriginConfig.fallback-group missing'
            return False
        if not self.dryrun and config['review-user'] != self.review_user:
            self.logger.warning(
                'OSRT:OriginConfig.review-user ({}) does not match ReviewBot.review_user ({})'.format(
                    config['review-user'], self.review_user))

        return True

    def policy_result_handle(self, project, package, origin_info_new, origin_info_old, result):
        """Apply a PolicyResult: add reviews and comments, then return the verdict.

        Returns result.accept when the policy is not waiting, otherwise None
        (leave the review open).
        """
        if len(result.reviews):
            self.policy_result_reviews_add(project, package, result.reviews)

        self.policy_result_comment_add(project, package, result.comments)

        if not result.wait:
            if result.accept:
                # Annotation recorded in the accept message for later
                # origin_find_fallback() lookups.
                self.review_messages['accepted'] = origin_annotation_dump(origin_info_new, origin_info_old)
            return result.accept

        return None

    def policy_result_reviews_add(self, project, package, reviews):
        """Add the reviews requested by the policy.

        'maintainer' maps to a devel-project review, 'fallback' to the
        configured fallback group, anything else is treated as a group name.
        """
        for key, comment in reviews.items():
            if key == 'maintainer':
                self.devel_project_review_ensure(self.request, project, package, comment)
            elif key == 'fallback':
                fallback_group = config_load(self.apiurl, project).get('fallback-group')
                self.add_review(self.request, by_group=fallback_group, msg=comment)
            else:
                self.add_review(self.request, by_group=key, msg=comment)

    def policy_result_comment_add(self, project, package, comments):
        """Write (or replace) the bot comment summarizing the policy outcome."""
        message = '\n\n'.join(comments)
        if len(self.request.actions) > 1:
            # Multiple actions share one request; namespace the comment by
            # target so each action gets its own comment thread.
            message = '## {}/{}\n\n{}'.format(project, package, message)
            suffix = '::'.join([project, package])
        else:
            suffix = None

        only_replace = False
        if not len(comments):
            # Nothing to report; only clear out a previously written comment.
            message = 'Previous comment no longer relevant.'
            only_replace = True

        self.comment_write(state='seen', message=message, identical=True,
                           only_replace=only_replace, bot_name_suffix=suffix)
|
||||
|
||||
|
||||
class CommandLineInterface(ReviewBot.CommandLineInterface):
    """CLI wrapper that runs OriginManager as the review bot."""

    def __init__(self, *args, **kwargs):
        ReviewBot.CommandLineInterface.__init__(self, *args, **kwargs)
        # Bot class instantiated by the ReviewBot CLI machinery.
        self.clazz = OriginManager
|
||||
|
||||
# Script entry point: run the origin-manager review bot CLI.
if __name__ == "__main__":
    app = CommandLineInterface()
    sys.exit(app.main())
|
@ -99,6 +99,8 @@ class Cache(object):
|
||||
r'/source$': TTL_LONG,
|
||||
# Sources will be expired with project, could be done on package level.
|
||||
r'/source/([^/?]+)(?:\?.*)?$': TTL_LONG,
|
||||
# Handle origin-manager repetitive package_source_hash_history() calls.
|
||||
r'/source/([^/]+)/(?:[^/]+)/(?:_history)$': TTL_SHORT,
|
||||
r'/source/([^/]+)/(?:[^/]+)/(?:_meta|_link)$': TTL_LONG,
|
||||
r'/source/([^/]+)/dashboard/[^/]+': TTL_LONG,
|
||||
r'/source/([^/]+)/_attribute/[^/]+': TTL_DUPLICATE,
|
||||
|
@ -121,6 +121,7 @@ DEFAULT = {
|
||||
r'openSUSE:(?P<project>Leap:(?P<version>[\d.]+)(?::NonFree)?:Update)$': {
|
||||
'main-repo': 'standard',
|
||||
'leaper-override-group': 'leap-reviewers',
|
||||
'repo-checker': 'repo-checker',
|
||||
'repo_checker-arch-whitelist': 'x86_64',
|
||||
'repo_checker-no-filter': 'True',
|
||||
'repo_checker-package-comment-devel': 'True',
|
||||
@ -207,8 +208,12 @@ class Config(object):
|
||||
@memoize(session=True) # Allow reset by memoize_session_reset() for ReviewBot.
|
||||
def get(apiurl, project):
|
||||
"""Cached version for directly accessing project config."""
|
||||
Config(apiurl, project)
|
||||
return conf.config.get(project, [])
|
||||
# Properly handle loading the config for interconnect projects.
|
||||
from osclib.core import project_remote_apiurl
|
||||
apiurl_remote, project_remote = project_remote_apiurl(apiurl, project)
|
||||
|
||||
Config(apiurl_remote, project_remote)
|
||||
return conf.config.get(project_remote, [])
|
||||
|
||||
@property
|
||||
def conf(self):
|
||||
|
195
osclib/core.py
195
osclib/core.py
@ -21,6 +21,7 @@ from osc.core import http_PUT
|
||||
from osc.core import makeurl
|
||||
from osc.core import owner
|
||||
from osc.core import Request
|
||||
from osc.core import search
|
||||
from osc.core import show_package_meta
|
||||
from osc.core import show_project_meta
|
||||
from osc.core import show_results_meta
|
||||
@ -461,3 +462,197 @@ def project_meta_revision(apiurl, project):
|
||||
root = ET.fromstringlist(get_commitlog(
|
||||
apiurl, project, '_project', None, format='xml', meta=True))
|
||||
return int(root.find('logentry').get('revision'))
|
||||
|
||||
def entity_exists(apiurl, project, package=None):
    """Return True when the project (or package within it) exists on OBS.

    A 404 from the _meta endpoint means the entity does not exist; any other
    HTTP error is re-raised.
    """
    try:
        # list() keeps the filter() result concatenable on Python 3 as well,
        # where filter() returns a lazy iterator instead of a list.
        http_GET(makeurl(apiurl, list(filter(None, ['source', project, package])) + ['_meta']))
    except HTTPError as e:
        if e.code == 404:
            return False

        # Bare raise preserves the original traceback.
        raise

    return True
|
||||
|
||||
def entity_source_link(apiurl, project, package=None):
    """Fetch the _link of a package, or the project-level link element.

    Returns the package link root, the project <link> element, or None when
    the entity (or its link) does not exist (HTTP 404).
    """
    if package:
        url_parts = ['source', project, package, '_link']
    else:
        url_parts = ['source', project, '_meta']

    try:
        root = ETL.parse(http_GET(makeurl(apiurl, url_parts))).getroot()
    except HTTPError as e:
        if e.code != 404:
            raise e
        return None

    if package:
        return root
    return root.find('link')
|
||||
|
||||
@memoize(session=True)
def package_source_link_copy(apiurl, project, package):
    """True when the package links elsewhere with cicount="copy"."""
    link = entity_source_link(apiurl, project, package)
    if link is None:
        return False
    return link.get('cicount') == 'copy'
|
||||
|
||||
# Ideally, all package_source_hash* functions would operate on srcmd5, but
|
||||
# unfortunately that is not practical for real use-cases. The srcmd5 includes
|
||||
# service run information in addition to the presence of a link even if the
|
||||
# expanded sources are identical. The verifymd5 sum excludes such information
|
||||
# and only covers the sources (as should be the point), but looks at the link
|
||||
# sources which means for projects like devel which link to the head revision of
|
||||
# downstream all the verifymd5 sums are the same. This makes the summary md5s
|
||||
# provided by OBS useless for comparing source and really anything. Instead the
|
||||
# individual file md5s are used to generate a sha1 which is used for comparison.
|
||||
# In the case of maintenance projects they are structured such that the updates
|
||||
# are suffixed packages and the unsuffixed package is empty and only links to
|
||||
# a specific suffixed package each revision. As such for maintenance projects
|
||||
# the link must be expanded and is safe to do so. Additionally, projects that
|
||||
# inherit packages need the same treatment (i.e. expanding) until they are
|
||||
# overridden within the project.
|
||||
@memoize(session=True)
def package_source_hash(apiurl, project, package, revision=None):
    """Return a short sha1 over the md5s of the package's source files.

    See the comment block above for why individual file md5s are hashed
    instead of relying on srcmd5/verifymd5. Returns None when the package or
    the requested revision does not exist.
    """
    query = {}
    if revision:
        query['rev'] = revision

    # Will not catch packages that previous had a link, but no longer do.
    if package_source_link_copy(apiurl, project, package):
        # Expand so the hash covers the actual (linked-to) sources.
        query['expand'] = 1

    try:
        url = makeurl(apiurl, ['source', project, package], query)
        root = ETL.parse(http_GET(url)).getroot()
    except HTTPError as e:
        if e.code == 404:
            return None

        raise e

    if revision and root.find('error') is not None:
        # OBS returns XML error instead of HTTP 404 if revision not found.
        return None

    # Local import — presumably avoids a circular import at module load;
    # TODO confirm.
    from osclib.util import sha1_short
    # Hash only real source files; the _link entry is layout, not source.
    return sha1_short(root.xpath('entry[@name!="_link"]/@md5'))
|
||||
|
||||
def package_source_hash_history(apiurl, project, package, limit=5, include_project_link=False):
    """Yield source hashes of package revisions, newest first.

    With include_project_link, once the local history is exhausted (and the
    limit not yet reached) the walk continues into the project this project
    links to, skipping hashes already seen. limit=None means unbounded.
    """
    try:
        # get_commitlog() reverses the order so newest revisions are first.
        root = ETL.fromstringlist(
            get_commitlog(apiurl, project, package, None, format='xml'))
    except HTTPError as e:
        if e.code == 404:
            # No history: yield nothing.
            return

        raise e

    if include_project_link:
        # Track what was yielded locally so linked-project duplicates skip.
        source_hashes = []

    source_md5s = root.xpath('logentry/@srcmd5')
    for source_md5 in source_md5s[:limit]:
        source_hash = package_source_hash(apiurl, project, package, source_md5)
        yield source_hash

        if include_project_link:
            source_hashes.append(source_hash)

    if include_project_link and (not limit or len(source_md5s) < limit):
        # Continue into the linked (parent) project, if any.
        link = entity_source_link(apiurl, project)
        if link is None:
            return
        project = link.get('project')

        if limit:
            limit_remaining = limit - len(source_md5s)

        # Allow small margin for duplicates.
        for source_hash in package_source_hash_history(apiurl, project, package, None, True):
            if source_hash in source_hashes:
                # Duplicate of a local revision; does not count against limit.
                continue

            yield source_hash

            if limit:
                limit_remaining += -1
                if limit_remaining == 0:
                    break
|
||||
|
||||
@memoize(session=True)
def project_remote_list(apiurl):
    """Map remote (interconnect) project names to their apiurls."""
    root = search(apiurl, project='starts-with(remoteurl, "http")')['project']

    remotes = {}
    for project in root.findall('project'):
        # Strip ending /public as the only use-cases for manually checking
        # remote projects is to query them directly to use an API that does not
        # work over the interconnect. As such /public will have same problem.
        remote_url = project.find('remoteurl').text
        remotes[project.get('name')] = re.sub('/public$', '', remote_url)

    return remotes
|
||||
|
||||
def project_remote_apiurl(apiurl, project):
    """Resolve an interconnect-prefixed project to (remote apiurl, project).

    Returns the original (apiurl, project) when no remote prefix matches.
    """
    for remote, remote_apiurl in project_remote_list(apiurl).items():
        prefix = remote + ':'
        if project.startswith(prefix):
            return remote_apiurl, project[len(prefix):]

    return apiurl, project
|
||||
|
||||
def review_find_last(request, who):
    """Return the most recent review on request made by who, or None."""
    matches = (review for review in reversed(request.reviews) if review.who == who)
    return next(matches, None)
|
||||
|
||||
def reviews_remaining(request):
    """Short identifiers for all not-yet-accepted reviews on request."""
    return [review_short(review)
            for review in request.reviews
            if review.state != 'accepted']
|
||||
|
||||
def review_short(review):
    """Compact identifier for a review target: user, group, or project[/package]."""
    if review.by_user:
        return review.by_user
    if review.by_group:
        return review.by_group
    if not review.by_project:
        return None

    if review.by_package:
        return '{}/{}'.format(review.by_project, review.by_package)
    return review.by_project
|
||||
|
||||
def issue_trackers(apiurl):
    """Map issue tracker names to their label templates."""
    root = ET.parse(http_GET(makeurl(apiurl, ['issue_trackers']))).getroot()
    return dict((tracker.find('name').text, tracker.find('label').text)
                for tracker in root.findall('issue-tracker'))
|
||||
|
||||
def issue_tracker_by_url(apiurl, tracker_url):
    """Return the issue-tracker element whose url matches tracker_url, else None."""
    if not tracker_url.endswith('/'):
        # All trackers are formatted with trailing slash.
        tracker_url += '/'

    root = ETL.parse(http_GET(makeurl(apiurl, ['issue_trackers']))).getroot()
    matches = root.xpath('issue-tracker[url[text()="{}"]]'.format(tracker_url))
    return matches[0] if matches else None
|
||||
|
||||
def issue_tracker_label_apply(tracker, identifier):
    """Substitute identifier into the tracker's '@@@' label template."""
    label_template = tracker.find('label').text
    return label_template.replace('@@@', identifier)
|
||||
|
||||
def request_remote_identifier(apiurl, apiurl_remote, request_id):
    """Identifier for a request, tracker-labeled when it lives on a remote OBS."""
    if apiurl_remote == apiurl:
        return 'request#{}'.format(request_id)

    # The URL differences make this rather convoluted.
    tracker = issue_tracker_by_url(apiurl, apiurl_remote.replace('api.', 'build.'))
    if tracker is None:
        return request_id
    return issue_tracker_label_apply(tracker, request_id)
|
||||
|
475
osclib/origin.py
Normal file
475
osclib/origin.py
Normal file
@ -0,0 +1,475 @@
|
||||
from copy import deepcopy
|
||||
from collections import namedtuple
|
||||
import logging
|
||||
from osc.core import get_request_list
|
||||
from osclib.conf import Config
|
||||
from osclib.core import attribute_value_load
|
||||
from osclib.core import devel_project_get
|
||||
from osclib.core import entity_exists
|
||||
from osclib.core import package_source_hash
|
||||
from osclib.core import package_source_hash_history
|
||||
from osclib.core import project_remote_apiurl
|
||||
from osclib.core import review_find_last
|
||||
from osclib.core import reviews_remaining
|
||||
from osclib.core import request_remote_identifier
|
||||
from osclib.memoize import memoize
|
||||
from osclib.util import project_list_family
|
||||
from osclib.util import project_list_family_prior_pattern
|
||||
import re
|
||||
import yaml
|
||||
|
||||
NAME = 'origin-manager'
|
||||
DEFAULTS = {
|
||||
'unknown_origin_wait': False,
|
||||
'origins': [],
|
||||
'review-user': '<config:origin-manager-review-user>',
|
||||
'fallback-group': '<config:origin-manager-fallback-group>',
|
||||
'fallback-workaround': {},
|
||||
}
|
||||
POLICY_DEFAULTS = {
|
||||
'additional_reviews': [],
|
||||
'automatic_updates': True,
|
||||
'maintainer_review_always': False,
|
||||
'maintainer_review_initial': True,
|
||||
'pending_submission_allow': False,
|
||||
'pending_submission_consider': False,
|
||||
'pending_submission_allowed_reviews': [
|
||||
'<config_source:staging>*',
|
||||
'<config_source:repo-checker>',
|
||||
],
|
||||
}
|
||||
|
||||
OriginInfo = namedtuple('OriginInfo', ['project', 'pending'])
|
||||
PendingRequestInfo = namedtuple('PendingRequestInfo', ['identifier', 'reviews_remaining'])
|
||||
PolicyResult = namedtuple('PolicyResult', ['wait', 'accept', 'reviews', 'comments'])
|
||||
|
||||
@memoize(session=True)
def config_load(apiurl, project):
    """Load and resolve the project's OSRT:OriginConfig attribute.

    Returns {} when the attribute is not set; otherwise the YAML config with
    wildcards, defaults, and placeholders resolved by config_resolve().
    """
    config = attribute_value_load(apiurl, project, 'OriginConfig')
    if not config:
        return {}

    return config_resolve(apiurl, project, yaml.safe_load(config))
|
||||
|
||||
def config_origin_generator(origins, apiurl=None, project=None, package=None, skip_workarounds=False):
    """Yield (origin, values) pairs from the ordered origins list.

    Each list item is expected to be a single-entry dict (the inner break
    enforces that only the first entry is used). The '<devel>' placeholder
    (optionally with '~' workaround suffix) is replaced by the package's
    devel project when apiurl/project/package are provided; items whose
    devel project cannot be determined are skipped entirely, as are
    workaround entries when skip_workarounds is set.
    """
    for origin_item in origins:
        for origin, values in origin_item.items():
            is_workaround = origin_workaround_check(origin)
            if skip_workarounds and is_workaround:
                # Skip this item; break moves on to the next list entry.
                break

            if (origin == '<devel>' or origin == '<devel>~') and apiurl and project and package:
                devel_project, devel_package = devel_project_get(apiurl, project, package)
                if not devel_project:
                    # No devel project known; placeholder cannot resolve.
                    break
                origin = devel_project
                if is_workaround:
                    # Re-apply the workaround marker lost by substitution.
                    origin = origin_workaround_ensure(origin)

            yield origin, values
            break  # Only support single value inside list item.
|
||||
|
||||
def config_resolve(apiurl, project, config):
    """Expand wildcards, defaults, and family patterns in an origin config.

    Mutates and returns config: a '*' entry supplies defaults for concrete
    origins (applied to entries listed before it), '*~' does the same for
    workaround entries (creating them as needed), and any origin containing
    '*' expands to the matching project family members in place.
    """
    defaults = POLICY_DEFAULTS.copy()
    defaults_workarounds = POLICY_DEFAULTS.copy()

    origins_original = config_origin_list(config)

    config_project = Config.get(apiurl, project)
    config_resolve_variables(config, config_project)

    origins = config['origins']
    i = 0
    while i < len(origins):
        # next(iter(...)) instead of .keys()[0]: dict views are not
        # subscriptable on Python 3; behavior is identical on Python 2.
        origin = next(iter(origins[i]))
        values = origins[i][origin]

        if origin == '*':
            del origins[i]
            defaults.update(values)
            defaults_workarounds.update(values)
            config_resolve_apply(config, values, until='*')
        elif origin == '*~':
            del origins[i]
            defaults_workarounds.update(values)
            config_resolve_create_workarounds(config, values, origins_original)
            config_resolve_apply(config, values, workaround=True, until='*~')
        elif '*' in origin:
            # Does not allow for family + workaround expansion (ie. foo*~).
            del origins[i]
            config_resolve_create_family(apiurl, project, config, i, origin, values)
        elif origin.endswith('~'):
            # Workaround entry: overlay on workaround defaults.
            values_new = deepcopy(defaults_workarounds)
            values_new.update(values)
            values.update(values_new)
            i += 1
        else:
            # Concrete entry: overlay on plain defaults.
            values_new = deepcopy(defaults)
            values_new.update(values)
            values.update(values_new)
            i += 1

    return config
|
||||
|
||||
def config_resolve_variables(config, config_project):
    """Apply DEFAULTS and expand '<config:*>' placeholders in config in place."""
    # Merge so that explicit config values win over DEFAULTS.
    defaults_merged = DEFAULTS.copy()
    defaults_merged.update(config)
    config.update(defaults_merged)

    for key in ['review-user', 'fallback-group']:
        config[key] = config_resolve_variable(config[key], config_project)

    if not config['review-user']:
        # Placeholder resolved to nothing; fall back to the bot's own name.
        config['review-user'] = NAME

    for origin, values in config_origin_generator(config['origins']):
        if 'additional_reviews' in values:
            values['additional_reviews'] = [
                config_resolve_variable(v, config_project) for v in values['additional_reviews']]
|
||||
|
||||
def config_resolve_variable(value, config_project, key='config'):
    """Expand a '<key:name>suffix' placeholder using project config.

    Returns value unchanged when it is not a placeholder, the configured
    value plus any trailing suffix when present, or '' when the referenced
    entry is missing or empty.
    """
    prefix = '<{}:'.format(key)
    end = value.rfind('>')
    if end == -1 or not value.startswith(prefix):
        return value

    name = value[len(prefix):end]
    if name in config_project and config_project[name]:
        return config_project[name] + value[end + 1:]
    return ''
|
||||
|
||||
def config_origin_list(config, apiurl=None, project=None, package=None, skip_workarounds=False):
    """Ordered origin names from config (optionally excluding workarounds)."""
    return [origin for origin, values in config_origin_generator(
        config['origins'], apiurl, project, package, skip_workarounds)]
|
||||
|
||||
def config_resolve_create_workarounds(config, values_workaround, origins_skip):
    """Insert a '~' workaround entry directly after each concrete origin.

    Entries that already have an explicit workaround (origins_skip) and
    wildcard or workaround entries themselves are not duplicated.
    """
    origins = config['origins']
    i = 0
    for origin, values in config_origin_generator(origins):
        i += 1
        if origin.startswith('*') or origin.endswith('~'):
            continue

        origin_new = origin + '~'
        if origin_new in origins_skip:
            # Explicit workaround already declared in the original config.
            continue

        values_new = deepcopy(values)
        values_new.update(values_workaround)
        # NOTE(review): inserts while the generator iterates origins; the new
        # item lands at the cursor and is seen next, but its '~' suffix makes
        # it skip via the continue above — confirm this invariant holds.
        origins.insert(i, { origin_new: values_new })
|
||||
|
||||
def config_resolve_create_family(apiurl, project, config, position, origin, values):
    """Expand a family pattern origin into concrete entries at position.

    Inserting in reverse keeps the family's priority order in the list.
    """
    projects = project_list_family_prior_pattern(apiurl, origin, project)
    for origin_expanded in reversed(projects):
        config['origins'].insert(position, { origin_expanded: values })
|
||||
|
||||
def config_resolve_apply(config, values_apply, key=None, workaround=False, until=None):
    """Merge values_apply into matching origin entries in place.

    With key, only the entry named key is updated; with workaround, only
    workaround entries ('~' suffix, excluding '*~') are touched; with until,
    stop before the entry of that name.
    """
    for origin, values in config_origin_generator(config['origins']):
        if workaround and (not origin.endswith('~') or origin == '*~'):
            continue

        if key:
            if origin == key:
                # Was values.update(values) — a self-update no-op; the intent
                # is clearly to merge the supplied values into the keyed entry.
                values.update(values_apply)
            continue

        if until and origin == until:
            break

        values.update(values_apply)
|
||||
|
||||
def origin_workaround_check(origin):
    """True when origin is a workaround entry (marked by a trailing '~')."""
    return origin[-1:] == '~'
|
||||
|
||||
def origin_workaround_ensure(origin):
    """Return origin with the workaround suffix ('~') guaranteed present."""
    return origin if origin.endswith('~') else origin + '~'
|
||||
|
||||
@memoize(session=True)
def origin_find(apiurl, target_project, package, source_hash=None, current=False,
                pending_allow=True, fallback=True):
    """Determine the origin project of the given package source.

    Walks the configured origins in priority order, matching source_hash
    against each origin's committed history and (when allowed) its open
    submit requests. Returns an OriginInfo, falling back — when enabled — to
    review-annotation history (current=True) or to the package's current
    origin, marked as a workaround. Returns None when nothing matches.
    """
    config = config_load(apiurl, target_project)

    if not source_hash:
        # No hash supplied: look at the sources currently in the project.
        current = True
        source_hash = package_source_hash(apiurl, target_project, package)
        if not source_hash:
            return None

    logging.debug('origin_find: {}/{} with source {} ({}, {}, {})'.format(
        target_project, package, source_hash, current, pending_allow, fallback))

    # Workarounds are skipped here (True); they are only reachable via the
    # fallback path below.
    for origin, values in config_origin_generator(config['origins'], apiurl, target_project, package, True):
        if project_source_contain(apiurl, origin, package, source_hash):
            return OriginInfo(origin, False)

        if pending_allow and (values['pending_submission_allow'] or values['pending_submission_consider']):
            pending = project_source_pending(apiurl, origin, package, source_hash)
            if pending is not False:
                # pending is a PendingRequestInfo for the matching request.
                return OriginInfo(origin, pending)

    if not fallback:
        return None

    # Unable to find matching origin, if current fallback to last known origin
    # and mark as workaround, otherwise return current origin as workaround.
    if current:
        origin_info = origin_find_fallback(apiurl, target_project, package, source_hash, config['review-user'])
    else:
        origin_info = origin_find(apiurl, target_project, package)

    if origin_info:
        # Force origin to be workaround since required fallback.
        origin = origin_workaround_ensure(origin_info.project)
        if origin in config_origin_list(config, apiurl, target_project, package):
            return OriginInfo(origin, origin_info.pending)

    return None
|
||||
|
||||
def project_source_contain(apiurl, project, package, source_hash):
    """True when any known revision of project/package matches source_hash."""
    for candidate in package_source_hash_history(apiurl, project, package):
        project_source_log('contain', project, candidate, source_hash)
        if candidate == source_hash:
            return True

    return False
|
||||
|
||||
def project_source_pending(apiurl, project, package, source_hash):
    """Check open submit requests against project/package for source_hash.

    Returns a PendingRequestInfo for the first matching open request,
    otherwise False. Handles interconnect (remote) projects transparently.
    """
    apiurl_remote, project_remote = project_remote_apiurl(apiurl, project)
    requests = get_request_list(apiurl_remote, project_remote, package, None, ['new', 'review'], 'submit')
    for request in requests:
        for action in request.actions:
            source_hash_consider = package_source_hash(
                apiurl_remote, action.src_project, action.src_package, action.src_rev)

            project_source_log('pending', project, source_hash_consider, source_hash)
            if source_hash_consider == source_hash:
                return PendingRequestInfo(
                    request_remote_identifier(apiurl, apiurl_remote, request.reqid),
                    reviews_remaining(request))

    return False
|
||||
|
||||
def project_source_log(key, project, source_hash_consider, source_hash):
    """Debug-log a source hash comparison, flagging matches."""
    matched = source_hash_consider == source_hash
    logging.debug('source_{}: {:<40} {} == {}{}'.format(
        key, project, source_hash_consider, source_hash,
        ' (match)' if matched else ''))
|
||||
|
||||
def origin_find_fallback(apiurl, target_project, package, source_hash, user):
    """Last-resort origin lookup when no configured origin matches.

    Tries, in order: the annotation left by user on the newest accepted
    submit request, the configured fallback-workaround project, and finally
    older revisions of the target package itself.
    """
    # Search accepted requests (newest to oldest), find the last review made by
    # the specified user, load comment as annotation, and extract origin.
    requests = get_request_list(apiurl, target_project, package, None, ['accepted'], 'submit')
    for request in sorted(requests, key=lambda r: r.reqid, reverse=True):
        review = review_find_last(request, user)
        if not review:
            continue

        annotation = origin_annotation_load(review.comment)
        return OriginInfo(annotation.get('origin'), False)

    # Fallback to searching workaround project.
    fallback_workaround = config_load(apiurl, target_project).get('fallback-workaround')
    if fallback_workaround:
        if project_source_contain(apiurl, fallback_workaround['project'], package, source_hash):
            return OriginInfo(fallback_workaround['origin'], False)

    # Attempt to find a revision of target package that matches an origin.
    first = True
    for source_hash_consider in package_source_hash_history(apiurl, target_project, package):
        if first:
            # Skip the newest revision — it is the one that failed to match.
            first = False
            continue

        origin_info = origin_find(
            apiurl, target_project, package, source_hash_consider, pending_allow=False, fallback=False)
        if origin_info:
            return origin_info

    return None
|
||||
|
||||
def origin_annotation_dump(origin_info_new, origin_info_old):
    """Serialize origin info as YAML for inclusion in a review message."""
    data = {'origin': str(origin_info_new.project)}
    origin_changed = origin_info_old and origin_info_new.project != origin_info_old.project
    if origin_changed:
        data['origin_old'] = str(origin_info_old.project)

    return yaml.dump(data, default_flow_style=False)
|
||||
|
||||
def origin_annotation_load(annotation):
    """Parse a YAML annotation from a review comment.

    OBS indents every subsequent line of a comment, which breaks YAML's
    significant indentation, so leading whitespace is stripped per line.
    """
    cleaned = re.sub(r'^\s+', '', annotation, flags=re.MULTILINE)
    return yaml.safe_load(cleaned)
|
||||
|
||||
def origin_find_highest(apiurl, project, package):
    """First (highest priority) non-workaround origin that contains package."""
    config = config_load(apiurl, project)
    candidates = config_origin_generator(config['origins'], apiurl, project, package, True)
    return next((origin for origin, values in candidates
                 if entity_exists(apiurl, origin, package)), None)
|
||||
|
||||
def policy_evaluate(apiurl, project, package,
                    origin_info_new, origin_info_old,
                    source_hash_new, source_hash_old):
    """Evaluate the origin policy for a submission; return a PolicyResult."""
    if origin_info_new is None:
        # Source matches no allowed origin: decline (or wait, per config).
        config = config_load(apiurl, project)
        origins = config_origin_list(config, apiurl, project, package, True)
        comment = 'Source not found in allowed origins:\n\n- {}'.format('\n- '.join(origins))
        return PolicyResult(config['unknown_origin_wait'], False, {}, [comment])

    policy = policy_get(apiurl, project, package, origin_info_new.project)
    inputs = policy_input_calculate(apiurl, project, package,
                                    origin_info_new, origin_info_old,
                                    source_hash_new, source_hash_old)
    result = policy_input_evaluate(policy, inputs)

    # Stringify the namedtuple so the YAML debug dump stays readable.
    inputs['pending_submission'] = str(inputs['pending_submission'])
    logging.debug('policy_evaluate:\n\n{}'.format('\n'.join([
        '# policy\n{}'.format(yaml.dump(policy, default_flow_style=False)),
        '# inputs\n{}'.format(yaml.dump(inputs, default_flow_style=False)),
        str(result)])))
    return result
|
||||
|
||||
@memoize(session=True)
def policy_get(apiurl, project, package, origin):
    """Preprocessed policy values for origin, or None if not configured."""
    config = config_load(apiurl, project)
    for candidate, values in config_origin_generator(config['origins'], apiurl, project, package):
        if candidate == origin:
            return policy_get_preprocess(apiurl, origin, values)

    return None
|
||||
|
||||
def policy_get_preprocess(apiurl, origin, policy):
    """Expand '<config_source:*>' placeholders in the allowed-reviews list.

    The filter() result is materialized via list() so the stored value
    survives repeated iteration (and memoization by the caller) on Python 3,
    where filter() returns a one-shot iterator; behavior on Python 2 is
    unchanged.
    """
    # Strip the workaround suffix to address the real origin project.
    project = origin.rstrip('~')
    config_project = Config.get(apiurl, project)
    policy['pending_submission_allowed_reviews'] = list(filter(None, [
        config_resolve_variable(v, config_project, 'config_source')
        for v in policy['pending_submission_allowed_reviews']]))

    return policy
|
||||
|
||||
def policy_input_calculate(apiurl, project, package,
                           origin_info_new, origin_info_old,
                           source_hash_new, source_hash_old):
    """
    Calculate the inputs consumed by policy_input_evaluate().

    Returns a dict with: new_package, pending_submission, and either
    from_highest_priority (new packages) or origin_change, higher_priority,
    same_family, and direction (existing packages).
    """
    inputs = {
        # Treat no older origin info as new package.
        'new_package': not entity_exists(apiurl, project, package) or origin_info_old is None,
        'pending_submission': origin_info_new.pending,
    }

    if inputs['new_package']:
        origin_highest = origin_find_highest(apiurl, project, package)
        inputs['from_highest_priority'] = \
            origin_highest is None or origin_info_new.project == origin_highest
    else:
        workaround_new = origin_workaround_check(origin_info_new.project)
        inputs['origin_change'] = origin_info_new.project != origin_info_old.project
        if inputs['origin_change']:
            config = config_load(apiurl, project)
            origins = config_origin_list(config, apiurl, project, package)

            # Origin list is ordered by priority: lower index wins.
            inputs['higher_priority'] = \
                origins.index(origin_info_new.project) < origins.index(origin_info_old.project)
            if workaround_new:
                # Workaround origins are considered part of the same family.
                inputs['same_family'] = True
            else:
                inputs['same_family'] = \
                    origin_info_new.project in project_list_family(
                        apiurl, origin_info_old.project.rstrip('~'), True)
        else:
            inputs['higher_priority'] = None
            inputs['same_family'] = True

        if inputs['pending_submission']:
            # A pending submission is by definition moving the source forward.
            inputs['direction'] = 'forward'
        else:
            if workaround_new:
                source_hashes = []
            else:
                source_hashes = list(package_source_hash_history(
                    apiurl, origin_info_new.project, package, 10, True))

            try:
                index_new = source_hashes.index(source_hash_new)
                index_old = source_hashes.index(source_hash_old)
                if index_new == index_old:
                    inputs['direction'] = 'none'
                else:
                    # History is newest-first, so a smaller index is newer.
                    inputs['direction'] = 'forward' if index_new < index_old else 'backward'
            except ValueError:
                # Either hash is absent from the recent origin history.
                # (Typo fix: was misspelled 'unkown', which leaked into
                # user-facing fallback review messages.)
                inputs['direction'] = 'unknown'

    return inputs
|
||||
|
||||
def policy_input_evaluate(policy, inputs):
    """
    Evaluate a policy against the inputs from policy_input_calculate().

    Returns a PolicyResult(wait, accept, reviews, comments). Identical
    sources short-circuit to a non-accept, no-wait result.
    """
    result = PolicyResult(False, True, {}, [])

    if inputs['new_package']:
        if policy['maintainer_review_initial']:
            # Typo fix: message previously read 'inital'.
            result.reviews['maintainer'] = 'Need package maintainer approval for initial submission.'

        if not inputs['from_highest_priority']:
            result.reviews['fallback'] = 'Not from the highest priority origin which provides the package.'
    else:
        if inputs['direction'] == 'none':
            # Identical source needs no review and no waiting.
            return PolicyResult(False, False, {}, ['Identical source.'])

        if inputs['origin_change']:
            if inputs['higher_priority']:
                if not inputs['same_family'] and inputs['direction'] != 'forward':
                    result.reviews['fallback'] = 'Changing to a higher priority origin, ' \
                        'but from another family and {} direction.'.format(inputs['direction'])
                elif not inputs['same_family']:
                    result.reviews['fallback'] = 'Changing to a higher priority origin, but from another family.'
                elif inputs['direction'] != 'forward':
                    result.reviews['fallback'] = \
                        'Changing to a higher priority origin, but {} direction.'.format(inputs['direction'])
            else:
                result.reviews['fallback'] = 'Changing to a lower priority origin.'
        else:
            if inputs['direction'] == 'forward':
                if not policy['automatic_updates']:
                    result.reviews['fallback'] = 'Forward direction, but automatic updates not allowed.'
            else:
                result.reviews['fallback'] = '{} direction.'.format(inputs['direction'])

    if inputs['pending_submission'] is not False:
        reviews_not_allowed = policy_input_evaluate_reviews_not_allowed(policy, inputs)
        # Wait when pending submissions are disallowed outright or when any
        # remaining review is not in the allowed list.
        wait = not policy['pending_submission_allow'] or len(reviews_not_allowed)
        result = PolicyResult(wait, True, result.reviews, result.comments)

        if wait:
            if policy['pending_submission_allow'] and len(reviews_not_allowed):
                result.comments.append('Waiting on reviews of {}:\n\n- {}'.format(
                    inputs['pending_submission'].identifier, '\n- '.join(reviews_not_allowed)))
            else:
                result.comments.append('Waiting on {}.'.format(inputs['pending_submission'].identifier))

    if policy['maintainer_review_always']:
        # Placed last to override initial maintainer approval message.
        result.reviews['maintainer'] = 'Need package maintainer approval.'

    for additional_review in policy['additional_reviews']:
        result.reviews[additional_review] = 'Additional review required based on origin.'

    return result
|
||||
|
||||
def policy_input_evaluate_reviews_not_allowed(policy, inputs):
    """Return the remaining reviews on the pending submission that are not
    covered by the policy's allowed reviews (exact name or a pattern whose
    trailing '*' acts as a prefix wildcard)."""
    allowed_patterns = policy['pending_submission_allowed_reviews']

    def _review_allowed(review):
        # A pattern ending in '*' matches by prefix; otherwise exact match.
        for pattern in allowed_patterns:
            if pattern.endswith('*'):
                if review.startswith(pattern[:-1]):
                    return True
            elif review == pattern:
                return True
        return False

    return [review for review in inputs['pending_submission'].reviews_remaining
            if not _review_allowed(review)]
|
@ -47,6 +47,7 @@ from osc.core import streamfile
|
||||
|
||||
from osclib.cache import Cache
|
||||
from osclib.core import devel_project_get
|
||||
from osclib.core import entity_exists
|
||||
from osclib.core import project_list_prefix
|
||||
from osclib.core import project_pseudometa_file_load
|
||||
from osclib.core import project_pseudometa_file_save
|
||||
@ -1406,15 +1407,7 @@ class StagingAPI(object):
|
||||
:param project: project name to check
|
||||
:param package: optional package to check
|
||||
"""
|
||||
if package:
|
||||
url = self.makeurl(['source', project, package, '_meta'])
|
||||
else:
|
||||
url = self.makeurl(['source', project, '_meta'])
|
||||
try:
|
||||
http_GET(url)
|
||||
except HTTPError:
|
||||
return False
|
||||
return True
|
||||
return entity_exists(self.apiurl, project, package)
|
||||
|
||||
def package_version(self, project, package):
|
||||
"""
|
||||
|
@ -1,37 +1,55 @@
|
||||
from osc import conf
|
||||
from osclib.core import project_list_prefix
|
||||
from osclib.memoize import memoize
|
||||
|
||||
|
||||
@memoize(session=True)
def project_list_family(apiurl, project, include_update=False):
    """
    Determine the available projects within the same product family.

    Optionally include the matching :Update projects. A :NonFree suffix on
    the input is stripped for the lookup and re-applied to the results.

    Skips < SLE-12 due to format change.
    """
    if project.endswith(':NonFree'):
        project = project[:-8]
        project_suffix = ':NonFree'
    else:
        project_suffix = ''

    # Factory and port projects have no family siblings.
    if project == 'openSUSE:Factory':
        return [project + project_suffix]

    if project.endswith(':ARM') or project.endswith(':PowerPC'):
        return [project + project_suffix]

    count_original = project.count(':')
    if project.startswith('SUSE:SLE'):
        project = ':'.join(project.split(':')[:2])
        family_filter = lambda p: p.count(':') == count_original and (
            p.endswith(':GA') or (include_update and p.endswith(':Update')))
    else:
        family_filter = lambda p: p.count(':') == count_original or (
            include_update and p.count(':') == count_original + 1 and p.endswith(':Update'))

    prefix = ':'.join(project.split(':')[:-1])
    projects = project_list_prefix(apiurl, prefix)
    # Materialize: filter() is a lazy iterator under Python 3 and the loop
    # below assigns by index; a plain filter object would raise TypeError.
    projects = list(filter(family_filter, projects))

    if project_suffix:
        for i, project in enumerate(projects):
            # Place the suffix before :Update so openSUSE:X:NonFree:Update
            # style names are produced.
            if project.endswith(':Update'):
                projects[i] = project.replace(':Update', project_suffix + ':Update')
            else:
                projects[i] += project_suffix

    return projects
|
||||
|
||||
def project_list_family_prior(apiurl, project, include_self=False, last=None, include_update=False):
|
||||
"""
|
||||
Determine the available projects within the same product family released
|
||||
prior to the specified project.
|
||||
"""
|
||||
projects = project_list_family(apiurl, project)
|
||||
projects = project_list_family(apiurl, project, include_update)
|
||||
past = False
|
||||
prior = []
|
||||
for entry in sorted(projects, key=project_list_family_sorter, reverse=True):
|
||||
@ -48,6 +66,25 @@ def project_list_family_prior(apiurl, project, include_self=False, last=None):
|
||||
|
||||
return prior
|
||||
|
||||
def project_list_family_prior_pattern(apiurl, project_pattern, project=None, include_update=True):
    """
    Return family projects matching project_pattern, newest first.

    project_pattern contains a '*' splitting it into a prefix and suffix;
    only projects starting with the prefix are returned. When project is
    provided and matches the prefix, only projects released prior to it are
    considered.
    """
    # maxsplit=1 guarantees exactly two parts; the previous maxsplit=2 could
    # yield three parts for a pattern with multiple '*' and break unpacking.
    project_prefix, project_suffix = project_pattern.split('*', 1)
    if project:
        project = project if project.startswith(project_prefix) else None

    if project:
        projects = project_list_family_prior(apiurl, project, include_update=include_update)
    else:
        if ':Leap:' in project_prefix:
            project = project_prefix

        if ':SLE-' in project_prefix:
            project = project_prefix + ':GA'

        # NOTE(review): for a pattern that is neither Leap nor SLE, project
        # remains None here and project_list_family() will fail — confirm
        # patterns are limited to those families.
        projects = project_list_family(apiurl, project, include_update)
        projects = sorted(projects, key=project_list_family_sorter, reverse=True)

    return [p for p in projects if p.startswith(project_prefix)]
|
||||
|
||||
def project_list_family_sorter(project):
|
||||
"""Extract key to be used as sorter (oldest to newest)."""
|
||||
version = project_version(project)
|
||||
@ -82,7 +119,7 @@ def project_version(project):
|
||||
version += float(parts[2][2:]) / 10
|
||||
return version
|
||||
|
||||
return None
|
||||
return 0
|
||||
|
||||
def mail_send(project, to, subject, body, from_key='maintainer', followup_to_key='release-list', dry=False):
|
||||
from email.mime.text import MIMEText
|
||||
|
11
systemd/osrt-origin-manager.service
Normal file
11
systemd/osrt-origin-manager.service
Normal file
@ -0,0 +1,11 @@
|
||||
[Unit]
|
||||
Description=openSUSE Release Tools: origin-manager
|
||||
|
||||
[Service]
|
||||
User=osrt-origin-manager
|
||||
SyslogIdentifier=osrt-origin-manager
|
||||
ExecStart=/usr/bin/osrt-origin-manager --debug review
|
||||
RuntimeMaxSec=3 hour
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
10
systemd/osrt-origin-manager.timer
Normal file
10
systemd/osrt-origin-manager.timer
Normal file
@ -0,0 +1,10 @@
|
||||
[Unit]
|
||||
Description=openSUSE Release Tools: origin-manager
|
||||
|
||||
[Timer]
|
||||
OnBootSec=120
|
||||
OnUnitInactiveSec=5 min
|
||||
Unit=osrt-origin-manager.service
|
||||
|
||||
[Install]
|
||||
WantedBy=timers.target
|
@ -827,6 +827,10 @@ class OBS(object):
|
||||
# /search/
|
||||
#
|
||||
|
||||
@GET('/search/project')
def search_project(self, request, uri, headers):
    # Mock endpoint for /search/project: always report zero matching
    # projects regardless of the query.
    return (200, headers, '<collection matches="0"></collection>')
|
||||
|
||||
@GET('/search/project/id')
|
||||
def search_project_id(self, request, uri, headers):
|
||||
"""Return a search result /search/project/id."""
|
||||
|
Loading…
x
Reference in New Issue
Block a user