2015-02-19 10:57:55 +01:00
|
|
|
# Copyright (C) 2015 SUSE Linux GmbH
|
2014-02-12 16:46:54 +01:00
|
|
|
#
|
2015-02-19 10:57:55 +01:00
|
|
|
# This program is free software; you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU General Public License as published by
|
|
|
|
# the Free Software Foundation; either version 2 of the License, or
|
|
|
|
# (at your option) any later version.
|
|
|
|
#
|
|
|
|
# This program is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU General Public License along
|
|
|
|
# with this program; if not, write to the Free Software Foundation, Inc.,
|
|
|
|
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
2014-02-12 16:46:54 +01:00
|
|
|
|
2014-05-20 11:47:11 +02:00
|
|
|
import json
|
2014-02-12 16:46:54 +01:00
|
|
|
import logging
|
2014-02-18 09:21:29 +01:00
|
|
|
import urllib2
|
2014-03-03 14:54:27 +01:00
|
|
|
import time
|
2015-01-20 15:02:46 +01:00
|
|
|
import re
|
2014-05-20 11:47:11 +02:00
|
|
|
from xml.etree import cElementTree as ET
|
|
|
|
|
|
|
|
import yaml
|
2014-02-12 16:46:54 +01:00
|
|
|
|
2015-02-19 10:57:55 +01:00
|
|
|
from osc import conf
|
2014-02-12 17:48:18 +01:00
|
|
|
from osc import oscerr
|
2014-02-17 14:41:13 +01:00
|
|
|
from osc.core import change_review_state
|
2014-02-12 17:48:18 +01:00
|
|
|
from osc.core import delete_package
|
2015-08-06 13:48:07 +02:00
|
|
|
from osc.core import get_group
|
2014-02-12 17:48:18 +01:00
|
|
|
from osc.core import get_request
|
|
|
|
from osc.core import make_meta_url
|
|
|
|
from osc.core import makeurl
|
|
|
|
from osc.core import http_GET
|
|
|
|
from osc.core import http_POST
|
|
|
|
from osc.core import http_PUT
|
2014-02-12 16:46:54 +01:00
|
|
|
|
2014-06-04 16:54:21 +02:00
|
|
|
from osclib.comments import CommentAPI
|
2015-07-31 15:12:46 +02:00
|
|
|
from osclib.memoize import memoize
|
2014-02-12 18:19:46 +01:00
|
|
|
|
2014-06-06 16:22:37 +02:00
|
|
|
|
2014-02-12 17:58:19 +01:00
|
|
|
class StagingAPI(object):
|
2014-02-12 16:46:54 +01:00
|
|
|
"""
|
|
|
|
Class containing various api calls to work with staging projects.
|
|
|
|
"""
|
|
|
|
|
2015-02-19 10:57:55 +01:00
|
|
|
def __init__(self, apiurl, project):
|
|
|
|
"""Initialize instance variables."""
|
2014-02-12 16:46:54 +01:00
|
|
|
|
|
|
|
self.apiurl = apiurl
|
2015-02-19 10:57:55 +01:00
|
|
|
self.project = project
|
|
|
|
|
|
|
|
# Store some prefix / data used in the code.
|
|
|
|
self.cstaging = conf.config[project]['staging']
|
|
|
|
self.cstaging_group = conf.config[project]['staging-group']
|
2015-07-19 17:50:46 +02:00
|
|
|
self.cstaging_archs = conf.config[project]['staging-archs'].split(' ')
|
2015-02-19 10:57:55 +01:00
|
|
|
self.crings = conf.config[project]['rings']
|
|
|
|
self.cnonfree = conf.config[project]['nonfree']
|
|
|
|
self.crebuild = conf.config[project]['rebuild']
|
|
|
|
self.cproduct = conf.config[project]['product']
|
|
|
|
self.copenqa = conf.config[project]['openqa']
|
2015-08-06 13:48:07 +02:00
|
|
|
self.user = conf.get_apiurl_usr(apiurl)
|
2016-05-09 17:32:22 +02:00
|
|
|
self._ring_packages = None
|
|
|
|
self._packages_staged = None
|
2015-07-31 15:12:46 +02:00
|
|
|
|
2015-02-19 10:57:55 +01:00
|
|
|
# If the project support rings, inititialize some variables.
|
|
|
|
if self.crings:
|
|
|
|
self.rings = (
|
|
|
|
'{}:0-Bootstrap'.format(self.crings),
|
|
|
|
'{}:1-MinimalX'.format(self.crings),
|
|
|
|
'{}:2-TestDVD'.format(self.crings)
|
|
|
|
)
|
2016-05-09 17:32:22 +02:00
|
|
|
else:
|
|
|
|
self.rings = []
|
2014-02-12 16:46:54 +01:00
|
|
|
|
2016-05-09 17:32:22 +02:00
|
|
|
|
|
|
|
@property
|
|
|
|
def ring_packages(self):
|
|
|
|
if self._ring_packages is None:
|
|
|
|
self._ring_packages = self._generate_ring_packages()
|
|
|
|
|
|
|
|
return self._ring_packages
|
|
|
|
|
|
|
|
@ring_packages.setter
|
|
|
|
def ring_packages(self, value):
|
|
|
|
raise Exception("setting ring_packages is not allowed")
|
|
|
|
|
|
|
|
@property
|
|
|
|
def packages_staged(self):
|
|
|
|
if self._packages_staged is None:
|
|
|
|
self._packages_staged = self._get_staged_requests()
|
|
|
|
|
|
|
|
return self._packages_staged
|
|
|
|
|
|
|
|
@packages_staged.setter
|
|
|
|
def packages_staged(self, value):
|
|
|
|
raise Exception("setting packages_staged is not allowed")
|
2015-02-09 17:25:43 +01:00
|
|
|
|
2014-02-28 14:32:02 +01:00
|
|
|
def makeurl(self, l, query=None):
|
2014-02-24 11:49:46 +01:00
|
|
|
"""
|
|
|
|
Wrapper around osc's makeurl passing our apiurl
|
|
|
|
:return url made for l and query
|
|
|
|
"""
|
2014-02-28 14:32:02 +01:00
|
|
|
query = [] if not query else query
|
2014-02-24 11:49:46 +01:00
|
|
|
return makeurl(self.apiurl, l, query)
|
|
|
|
|
2016-02-18 18:25:51 +08:00
|
|
|
def _retried_request(self, url, func, data=None):
|
2016-02-05 16:25:30 +01:00
|
|
|
retry_sleep_seconds = 1
|
|
|
|
while True:
|
|
|
|
try:
|
2016-02-18 18:25:51 +08:00
|
|
|
if data is not None:
|
|
|
|
return func(url, data=data)
|
2016-02-05 16:25:30 +01:00
|
|
|
return func(url)
|
|
|
|
except urllib2.HTTPError, e:
|
|
|
|
if 500 <= e.code <= 599:
|
|
|
|
print 'Error {}, retrying {} in {}s'.format(e.code, url, retry_sleep_seconds)
|
|
|
|
time.sleep(retry_sleep_seconds)
|
|
|
|
# increase sleep time up to one minute to avoid hammering
|
|
|
|
# the server in case of real problems
|
|
|
|
if (retry_sleep_seconds % 60):
|
|
|
|
retry_sleep_seconds += 1
|
|
|
|
else:
|
|
|
|
raise e
|
|
|
|
|
2014-06-03 10:27:13 +02:00
|
|
|
    def retried_GET(self, url):
        """GET the url via http_GET, retrying on server (5xx) errors."""
        return self._retried_request(url, http_GET)
|
2014-06-03 10:27:13 +02:00
|
|
|
|
|
|
|
    def retried_POST(self, url):
        """POST to the url via http_POST, retrying on server (5xx) errors."""
        return self._retried_request(url, http_POST)
|
2014-06-03 10:27:13 +02:00
|
|
|
|
|
|
|
    def retried_PUT(self, url, data):
        """PUT data to the url via http_PUT, retrying on server (5xx) errors."""
        return self._retried_request(url, http_PUT, data)
|
2014-06-03 10:27:13 +02:00
|
|
|
|
2014-02-12 16:46:54 +01:00
|
|
|
def _generate_ring_packages(self):
|
|
|
|
"""
|
|
|
|
Generate dictionary with names of the rings
|
|
|
|
:return dictionary with ring names
|
|
|
|
"""
|
|
|
|
|
2014-02-12 18:19:46 +01:00
|
|
|
ret = {}
|
2014-02-12 16:46:54 +01:00
|
|
|
for prj in self.rings:
|
2014-02-28 14:32:02 +01:00
|
|
|
url = self.makeurl(['source', prj])
|
2014-02-12 16:46:54 +01:00
|
|
|
root = http_GET(url)
|
|
|
|
for entry in ET.parse(root).getroot().findall('entry'):
|
2014-05-06 10:12:36 +02:00
|
|
|
pkg = entry.attrib['name']
|
2015-02-19 10:57:55 +01:00
|
|
|
# XXX TODO - Test-DVD-x86_64 is hardcoded here
|
2015-03-17 16:35:04 +01:00
|
|
|
if pkg in ret and not pkg.startswith('Test-DVD-'):
|
2014-06-06 16:22:37 +02:00
|
|
|
msg = '{} is defined in two projects ({} and {})'
|
|
|
|
raise Exception(msg.format(pkg, ret[pkg], prj))
|
2014-05-06 10:12:36 +02:00
|
|
|
ret[pkg] = prj
|
2014-02-12 16:46:54 +01:00
|
|
|
return ret
|
|
|
|
|
2014-03-06 11:43:21 +01:00
|
|
|
def _get_staged_requests(self):
|
|
|
|
"""
|
|
|
|
Get all requests that are already staged
|
|
|
|
:return dict of staged requests with their project and srid
|
|
|
|
"""
|
|
|
|
|
2015-02-19 10:57:55 +01:00
|
|
|
packages_staged = {}
|
2014-03-06 11:43:21 +01:00
|
|
|
for prj in self.get_staging_projects():
|
|
|
|
meta = self.get_prj_pseudometa(prj)
|
|
|
|
for req in meta['requests']:
|
2014-03-07 11:38:05 +01:00
|
|
|
packages_staged[req['package']] = {'prj': prj, 'rq_id': req['id']}
|
2014-03-06 11:43:21 +01:00
|
|
|
|
|
|
|
return packages_staged
|
|
|
|
|
2014-07-03 15:52:10 +02:00
|
|
|
def get_package_information(self, project, pkgname, rev=None):
|
2014-02-12 16:46:54 +01:00
|
|
|
"""
|
|
|
|
Get the revision packagename and source project to copy from
|
|
|
|
based on content provided
|
|
|
|
:param project: the project we are having the package in
|
|
|
|
:param pkgname: name of the package we want to identify
|
|
|
|
:return dict ( project, package, revision, md5sum )
|
|
|
|
"""
|
|
|
|
|
2014-02-12 18:19:46 +01:00
|
|
|
package_info = {}
|
2014-02-12 16:46:54 +01:00
|
|
|
|
2014-07-03 15:52:10 +02:00
|
|
|
query = {
|
|
|
|
'rev': rev
|
|
|
|
}
|
|
|
|
if rev:
|
|
|
|
url = self.makeurl(['source', project, pkgname], query=query)
|
|
|
|
else:
|
|
|
|
url = self.makeurl(['source', project, pkgname])
|
2014-02-12 16:46:54 +01:00
|
|
|
content = http_GET(url)
|
2014-06-12 18:25:45 +02:00
|
|
|
root = ET.parse(content).getroot()
|
|
|
|
package_info['dir_srcmd5'] = root.attrib['srcmd5']
|
|
|
|
|
|
|
|
linkinfo = root.find('linkinfo')
|
|
|
|
package_info['srcmd5'] = linkinfo.attrib['srcmd5']
|
|
|
|
package_info['rev'] = linkinfo.attrib.get('rev', None)
|
|
|
|
package_info['project'] = linkinfo.attrib['project']
|
|
|
|
package_info['package'] = linkinfo.attrib['package']
|
|
|
|
|
2014-02-12 16:46:54 +01:00
|
|
|
return package_info
|
|
|
|
|
2015-01-16 15:37:07 +00:00
|
|
|
def get_filelist_for_package(self, pkgname, project, extension=None):
|
|
|
|
"""
|
|
|
|
Get a list of files inside a package container
|
|
|
|
:param package: the base packagename to be linked to
|
|
|
|
:param project: Project to verify
|
|
|
|
:param extension: Limit the file list to files with this extension
|
|
|
|
"""
|
|
|
|
|
|
|
|
filelist = []
|
2015-02-10 17:22:00 +01:00
|
|
|
query = {
|
2015-01-16 15:37:07 +00:00
|
|
|
'extension': extension
|
|
|
|
}
|
|
|
|
|
|
|
|
if extension:
|
|
|
|
url = self.makeurl(['source', project, pkgname], query=query)
|
|
|
|
else:
|
|
|
|
url = self.makeurl(['source', project, pkgname])
|
|
|
|
try:
|
|
|
|
content = http_GET(url)
|
|
|
|
for entry in ET.parse(content).getroot().findall('entry'):
|
|
|
|
filelist.append(entry.attrib['name'])
|
|
|
|
except urllib2.HTTPError, err:
|
|
|
|
if err.code == 404:
|
|
|
|
# The package we were supposed to query does not exist
|
|
|
|
# we can pass this up and return the empty filelist
|
|
|
|
pass
|
|
|
|
|
|
|
|
return filelist
|
|
|
|
|
2014-03-03 10:11:23 +01:00
|
|
|
def move_between_project(self, source_project, req_id,
|
|
|
|
destination_project):
|
2014-02-12 16:46:54 +01:00
|
|
|
"""
|
|
|
|
Move selected package from one staging to another
|
2014-02-13 15:58:12 +01:00
|
|
|
:param source_project: Source project
|
2014-02-17 14:09:15 +01:00
|
|
|
:param request: request to move
|
2014-02-13 15:58:12 +01:00
|
|
|
:param destination_project: Destination project
|
2014-02-12 16:46:54 +01:00
|
|
|
"""
|
|
|
|
|
2014-02-13 15:58:12 +01:00
|
|
|
# Get the relevant information about source
|
2014-02-15 17:09:11 +01:00
|
|
|
meta = self.get_prj_pseudometa(source_project)
|
2014-02-17 14:28:18 +01:00
|
|
|
found = False
|
|
|
|
for req in meta['requests']:
|
|
|
|
if int(req['id']) == int(req_id):
|
|
|
|
found = True
|
|
|
|
break
|
|
|
|
if not found:
|
|
|
|
return None
|
2014-02-12 16:46:54 +01:00
|
|
|
|
|
|
|
# Copy the package
|
2014-02-15 17:09:11 +01:00
|
|
|
self.rq_to_prj(req_id, destination_project)
|
2014-02-13 15:58:12 +01:00
|
|
|
# Delete the old one
|
2014-02-28 14:32:02 +01:00
|
|
|
self.rm_from_prj(source_project, request_id=req_id,
|
|
|
|
msg='Moved to {}'.format(destination_project))
|
2014-02-12 16:46:54 +01:00
|
|
|
|
2014-03-21 16:59:43 +01:00
|
|
|
# Build disable the old project if empty
|
2014-03-24 15:15:47 +01:00
|
|
|
self.build_switch_staging_project(source_project)
|
2014-03-21 16:59:43 +01:00
|
|
|
|
|
|
|
return True
|
|
|
|
|
2014-02-12 16:46:54 +01:00
|
|
|
def get_staging_projects(self):
|
|
|
|
"""
|
|
|
|
Get all current running staging projects
|
|
|
|
:return list of known staging projects
|
|
|
|
"""
|
|
|
|
|
|
|
|
projects = []
|
|
|
|
|
2015-02-19 10:57:55 +01:00
|
|
|
query = "id?match=starts-with(@name,'{}:')".format(self.cstaging)
|
2014-03-03 10:11:23 +01:00
|
|
|
url = self.makeurl(['search', 'project', query])
|
2014-02-12 16:46:54 +01:00
|
|
|
projxml = http_GET(url)
|
|
|
|
root = ET.parse(projxml).getroot()
|
|
|
|
for val in root.findall('project'):
|
|
|
|
projects.append(val.get('name'))
|
|
|
|
return projects
|
|
|
|
|
2015-07-16 15:09:26 +02:00
|
|
|
def is_adi_project(self, p):
|
|
|
|
return ':adi:' in p
|
|
|
|
|
2015-07-18 14:44:41 +02:00
|
|
|
# this function will crash if given a non-adi project name
|
|
|
|
def extract_adi_number(self, p):
|
|
|
|
return int(p.split(':adi:')[1])
|
|
|
|
|
2015-07-16 13:41:53 +02:00
|
|
|
def get_adi_projects(self):
|
|
|
|
"""
|
|
|
|
Get all current running ADI projects
|
|
|
|
:return list of known ADI projects
|
|
|
|
"""
|
|
|
|
|
2015-07-31 15:12:46 +02:00
|
|
|
projects = [p for p in self.get_staging_projects() if self.is_adi_project(p)]
|
2015-07-19 09:32:32 +02:00
|
|
|
return sorted(projects, key=lambda project: self.extract_adi_number(project))
|
2015-07-16 13:41:53 +02:00
|
|
|
|
2014-03-21 16:59:43 +01:00
|
|
|
def do_change_review_state(self, request_id, newstate, message=None,
|
2014-06-04 16:56:56 +02:00
|
|
|
by_group=None, by_user=None, by_project=None):
|
2014-02-12 16:46:54 +01:00
|
|
|
"""
|
|
|
|
Change review state of the staging request
|
2014-02-12 18:19:46 +01:00
|
|
|
:param request_id: id of the request
|
2014-02-12 16:46:54 +01:00
|
|
|
:param newstate: state of the new request
|
|
|
|
:param message: message for the review
|
2014-02-17 16:35:06 +01:00
|
|
|
:param by_group, by_user, by_project: review type
|
2014-02-12 16:46:54 +01:00
|
|
|
"""
|
2014-02-24 13:30:34 +01:00
|
|
|
|
2014-02-28 14:32:02 +01:00
|
|
|
message = '' if not message else message
|
|
|
|
|
2014-02-24 13:30:34 +01:00
|
|
|
req = get_request(self.apiurl, str(request_id))
|
|
|
|
if not req:
|
2014-02-28 14:32:02 +01:00
|
|
|
raise oscerr.WrongArgs('Request {} not found'.format(request_id))
|
2014-02-24 13:30:34 +01:00
|
|
|
|
|
|
|
for review in req.reviews:
|
|
|
|
if review.by_group == by_group and \
|
|
|
|
review.by_user == by_user and \
|
|
|
|
review.by_project == by_project and \
|
|
|
|
review.state == 'new':
|
|
|
|
|
|
|
|
# call osc's function
|
2014-03-03 10:11:23 +01:00
|
|
|
return change_review_state(self.apiurl, str(request_id),
|
|
|
|
newstate,
|
2014-03-21 16:59:43 +01:00
|
|
|
message=message,
|
2014-02-24 13:30:34 +01:00
|
|
|
by_group=by_group,
|
|
|
|
by_user=by_user,
|
|
|
|
by_project=by_project)
|
|
|
|
|
|
|
|
return False
|
|
|
|
|
2014-02-12 16:46:54 +01:00
|
|
|
    def accept_non_ring_request(self, request):
        """
        Accept review of requests that are not yet in any ring so we
        don't delay their testing.
        :param request: request to check (an XML element with id/action)
        """

        # Consolidate all data from request
        request_id = int(request.get('id'))
        action = request.findall('action')
        if not action:
            msg = 'Request {} has no action'.format(request_id)
            raise oscerr.WrongArgs(msg)
        # we care only about first action
        action = action[0]

        # Where are we targeting the package
        target_project = action.find('target').get('project')
        target_package = action.find('target').get('package')

        # If the values are empty it is no error
        # NOTE(review): execution deliberately (?) continues after logging —
        # a request without a target package falls through to the ring lookup
        # below with target_package=None; confirm this is intended.
        if not target_project or not target_package:
            msg = 'no target/package in request {}, action {}; '
            msg = msg.format(request_id, action)
            logging.info(msg)

        # Verify the package ring
        ring = self.ring_packages.get(target_package, None)
        if not ring:
            # accept the request here: a package outside the rings does not
            # need staging, so release the staging review immediately
            message = 'No need for staging, not in tested ring projects.'
            self.do_change_review_state(request_id, 'accepted', message=message,
                                        by_group=self.cstaging_group)
|
2014-02-12 16:46:54 +01:00
|
|
|
|
2015-10-01 19:09:15 +08:00
|
|
|
def supseded_request(self, request, target_pkgs=None):
|
2014-03-06 11:43:21 +01:00
|
|
|
"""
|
2014-03-07 09:32:48 +01:00
|
|
|
Returns a staging info for a request or None
|
|
|
|
:param request - a Request instance
|
|
|
|
:return dict with 'prj' and 'rq_id' of the old request
|
2014-03-06 11:43:21 +01:00
|
|
|
"""
|
2015-10-01 19:09:15 +08:00
|
|
|
|
|
|
|
if not target_pkgs:
|
|
|
|
target_pkgs = []
|
|
|
|
|
2014-03-06 11:43:21 +01:00
|
|
|
# Consolidate all data from request
|
|
|
|
request_id = int(request.get('id'))
|
|
|
|
action = request.findall('action')
|
|
|
|
if not action:
|
|
|
|
msg = 'Request {} has no action'.format(request_id)
|
|
|
|
raise oscerr.WrongArgs(msg)
|
|
|
|
# we care only about first action
|
|
|
|
action = action[0]
|
|
|
|
|
|
|
|
# Where are we targeting the package
|
|
|
|
target_project = action.find('target').get('project')
|
|
|
|
target_package = action.find('target').get('package')
|
|
|
|
|
|
|
|
# If the values are empty it is no error
|
|
|
|
if not target_project or not target_package:
|
|
|
|
msg = 'no target/package in request {}, action {}; '
|
|
|
|
msg = msg.format(request_id, action)
|
|
|
|
logging.info(msg)
|
|
|
|
|
2015-10-01 19:09:15 +08:00
|
|
|
pkg_do_supersede = True
|
|
|
|
if target_pkgs:
|
|
|
|
if target_package not in target_pkgs:
|
|
|
|
pkg_do_supersede = False
|
|
|
|
|
2014-03-06 11:43:21 +01:00
|
|
|
# If the package is currently tracked then we do the replacement
|
|
|
|
stage_info = self.packages_staged.get(target_package, {'prj': '', 'rq_id': 0})
|
2015-10-01 19:09:15 +08:00
|
|
|
if pkg_do_supersede and int(stage_info['rq_id']) != 0 and int(stage_info['rq_id']) != request_id:
|
2014-03-07 09:32:48 +01:00
|
|
|
return stage_info
|
|
|
|
return None
|
|
|
|
|
2015-10-01 19:09:15 +08:00
|
|
|
def update_superseded_request(self, request, target_pkgs=None):
|
2014-03-07 09:32:48 +01:00
|
|
|
"""
|
|
|
|
Replace superseded requests that are already in some
|
|
|
|
staging prj
|
|
|
|
:param request: request we are checking if it is fine
|
|
|
|
"""
|
2015-10-01 19:09:15 +08:00
|
|
|
if not target_pkgs:
|
|
|
|
target_pkgs = []
|
2014-03-07 09:32:48 +01:00
|
|
|
|
2015-10-01 19:09:15 +08:00
|
|
|
stage_info = self.supseded_request(request, target_pkgs)
|
2014-03-07 15:35:59 +01:00
|
|
|
request_id = int(request.get('id'))
|
|
|
|
|
2014-03-07 09:32:48 +01:00
|
|
|
if stage_info:
|
2014-03-06 11:43:21 +01:00
|
|
|
# Remove the old request
|
2014-03-21 16:59:43 +01:00
|
|
|
self.rm_from_prj(stage_info['prj'],
|
|
|
|
request_id=stage_info['rq_id'],
|
|
|
|
msg='Replaced by newer request',
|
|
|
|
review='declined')
|
2014-03-06 11:43:21 +01:00
|
|
|
# Add the new one that should be replacing it
|
2014-03-06 18:58:14 +01:00
|
|
|
self.rq_to_prj(request_id, stage_info['prj'])
|
2015-11-19 20:23:10 +08:00
|
|
|
return True
|
|
|
|
return False
|
2014-03-06 11:43:21 +01:00
|
|
|
|
2014-02-12 16:46:54 +01:00
|
|
|
def get_open_requests(self):
|
|
|
|
"""
|
|
|
|
Get all requests with open review for staging project
|
|
|
|
that are not yet included in any staging project
|
|
|
|
:return list of pending open review requests
|
|
|
|
"""
|
|
|
|
|
|
|
|
requests = []
|
|
|
|
|
|
|
|
# xpath query, using the -m, -r, -s options
|
2015-02-19 10:57:55 +01:00
|
|
|
where = "@by_group='{}'+and+@state='new'".format(self.cstaging_group)
|
|
|
|
projects = [format(self.project)]
|
|
|
|
if self.cnonfree:
|
|
|
|
projects.append(self.cnonfree)
|
|
|
|
targets = ["target[@project='{}']".format(p) for p in projects]
|
|
|
|
|
|
|
|
query = "match=state/@name='review'+and+review[{}]+and+({})".format(
|
|
|
|
where, '+or+'.join(targets))
|
2014-03-03 10:11:23 +01:00
|
|
|
url = self.makeurl(['search', 'request'], query)
|
2014-02-12 16:46:54 +01:00
|
|
|
f = http_GET(url)
|
|
|
|
root = ET.parse(f).getroot()
|
|
|
|
|
|
|
|
for rq in root.findall('request'):
|
|
|
|
requests.append(rq)
|
|
|
|
return requests
|
|
|
|
|
2015-10-01 19:09:15 +08:00
|
|
|
def dispatch_open_requests(self, packages=None):
|
2014-02-12 16:46:54 +01:00
|
|
|
"""
|
2014-03-03 10:11:23 +01:00
|
|
|
Verify all requests and dispatch them to staging projects or
|
|
|
|
approve them
|
|
|
|
|
2014-02-12 16:46:54 +01:00
|
|
|
"""
|
|
|
|
|
2015-10-01 19:09:15 +08:00
|
|
|
if not packages:
|
|
|
|
packages = []
|
|
|
|
|
2014-02-12 16:46:54 +01:00
|
|
|
# get all current pending requests
|
|
|
|
requests = self.get_open_requests()
|
|
|
|
# check if we can reduce it down by accepting some
|
|
|
|
for rq in requests:
|
2015-07-31 15:12:46 +02:00
|
|
|
# if self.crings:
|
|
|
|
# self.accept_non_ring_request(rq)
|
2015-10-01 19:09:15 +08:00
|
|
|
self.update_superseded_request(rq, packages)
|
2014-02-12 16:46:54 +01:00
|
|
|
|
2015-08-28 10:46:04 +02:00
|
|
|
@memoize(ttl=60, session=True, add_invalidate=True)
|
2014-02-12 16:46:54 +01:00
|
|
|
def get_prj_pseudometa(self, project):
|
|
|
|
"""
|
|
|
|
Gets project data from YAML in project description
|
|
|
|
:param project: project to read data from
|
|
|
|
:return structured object with metadata
|
|
|
|
"""
|
|
|
|
|
|
|
|
url = make_meta_url('prj', project, self.apiurl)
|
2014-02-17 12:03:07 +01:00
|
|
|
f = http_GET(url)
|
|
|
|
root = ET.parse(f).getroot()
|
2014-02-12 16:46:54 +01:00
|
|
|
description = root.find('description')
|
|
|
|
# If YAML parsing fails, load default
|
|
|
|
# FIXME: Better handling of errors
|
|
|
|
# * broken description
|
|
|
|
# * directly linked packages
|
|
|
|
# * removed linked packages
|
|
|
|
try:
|
|
|
|
data = yaml.load(description.text)
|
2014-07-17 13:55:45 +02:00
|
|
|
except (TypeError, AttributeError):
|
|
|
|
data = {}
|
|
|
|
# make sure we have a requests field
|
|
|
|
data['requests'] = data.get('requests', [])
|
2014-02-12 16:46:54 +01:00
|
|
|
return data
|
|
|
|
|
|
|
|
    def set_prj_pseudometa(self, project, meta):
        """
        Sets project description to the YAML of the provided object
        :param project: project to save into
        :param meta: data to save (its 'requests' list is sorted in place
                     by request id before writing)
        """

        # Get current metadata
        url = make_meta_url('prj', project, self.apiurl)
        root = ET.parse(http_GET(url)).getroot()
        # Find description
        description = root.find('description')
        # Order the requests and replace it with yaml
        meta['requests'] = sorted(meta['requests'], key=lambda x: x['id'])
        description.text = yaml.dump(meta)
        # Find title
        title = root.find('title')
        # Put something nice into title as well: the sorted package names
        new_title = []
        for request in meta['requests']:
            new_title.append(request['package'])
        nt = ', '.join(sorted(new_title))
        # Truncate to 240 chars — presumably the OBS title length limit;
        # TODO confirm.
        title.text = nt[:240]
        # Write XML back
        url = make_meta_url('prj', project, self.apiurl, force=True)
        http_PUT(url, data=ET.tostring(root))

        # Invalidate here the cache for this staging project
        self._invalidate_get_prj_pseudometa(project)
|
|
|
|
|
2014-02-12 16:46:54 +01:00
|
|
|
    def _add_rq_to_prj_pseudometa(self, project, request_id, package):
        """
        Records request as part of the project within metadata
        :param project: project to record into
        :param request_id: request id to record
        :param package: package the request is about
        """

        data = self.get_prj_pseudometa(project)
        append = True
        # If the package is already tracked, update its entry in place
        # instead of appending a duplicate.
        for request in data['requests']:
            if request['package'] == package:
                # Only update if needed (to save calls to get_request)
                if request['id'] != request_id or not request.get('author'):
                    request['id'] = request_id
                    request['author'] = get_request(self.apiurl, str(request_id)).get_creator()
                append = False
        if append:
            author = get_request(self.apiurl, str(request_id)).get_creator()
            data['requests'].append({'id': request_id, 'package': package, 'author': author})
        # Persist the modified pseudometa back to the project description.
        self.set_prj_pseudometa(project, data)
|
|
|
|
|
2014-02-18 13:30:39 +01:00
|
|
|
def get_request_id_for_package(self, project, package):
|
|
|
|
"""
|
|
|
|
Query the request id from meta
|
2014-03-06 11:43:21 +01:00
|
|
|
:param project: project the package is in
|
2014-02-18 13:30:39 +01:00
|
|
|
:param package: package we want to query for
|
|
|
|
"""
|
|
|
|
data = self.get_prj_pseudometa(project)
|
|
|
|
for x in data['requests']:
|
|
|
|
if x['package'] == package:
|
|
|
|
return int(x['id'])
|
|
|
|
return None
|
|
|
|
|
|
|
|
def get_package_for_request_id(self, project, request_id):
|
|
|
|
"""
|
|
|
|
Query the request id from meta
|
2014-03-06 11:43:21 +01:00
|
|
|
:param project: project the package is in
|
2014-02-18 13:30:39 +01:00
|
|
|
:param package: package we want to query for
|
|
|
|
"""
|
|
|
|
data = self.get_prj_pseudometa(project)
|
2014-03-04 18:21:59 +01:00
|
|
|
request_id = int(request_id)
|
2014-02-18 13:30:39 +01:00
|
|
|
for x in data['requests']:
|
|
|
|
if x['id'] == request_id:
|
|
|
|
return x['package']
|
|
|
|
return None
|
|
|
|
|
|
|
|
def _remove_package_from_prj_pseudometa(self, project, package):
|
2014-02-12 16:28:52 +01:00
|
|
|
"""
|
|
|
|
Delete request from the project pseudometa
|
|
|
|
:param project: project to remove from
|
|
|
|
:param package: package we want to remove from meta
|
|
|
|
"""
|
|
|
|
|
|
|
|
data = self.get_prj_pseudometa(project)
|
2014-02-13 13:20:44 +01:00
|
|
|
data['requests'] = filter(lambda x: x['package'] != package, data['requests'])
|
2014-02-15 17:09:11 +01:00
|
|
|
self.set_prj_pseudometa(project, data)
|
2014-02-12 16:28:52 +01:00
|
|
|
|
2014-03-03 10:11:23 +01:00
|
|
|
    def rm_from_prj(self, project, package=None, request_id=None,
                    msg=None, review='accepted'):
        """
        Delete request from the project
        :param project: project to remove from
        :param package: package to remove (looked up from request_id if None)
        :param request_id: request we want to remove (looked up from package
                           if None)
        :param msg: message for the log
        :param review: review state for the review, default accepted
        """

        # Resolve whichever of package / request_id was not given; silently
        # give up if neither can be resolved from the pseudometa.
        if not request_id:
            request_id = self.get_request_id_for_package(project, package)
        if not package:
            package = self.get_package_for_request_id(project, request_id)
        if not package or not request_id:
            return

        # Untrack first, then delete the package from its staging subproject.
        self._remove_package_from_prj_pseudometa(project, package)
        subprj = self.map_ring_package_to_subject(project, package)
        delete_package(self.apiurl, subprj, package, force=True, msg=msg)

        # Delete any sub-packages that live in a different subproject.
        for sub_prj, sub_pkg in self.get_sub_packages(package):
            sub_prj = self.map_ring_package_to_subject(project, sub_pkg)
            if sub_prj != subprj:  # if different to the main package's prj
                delete_package(self.apiurl, sub_prj, sub_pkg, force=True, msg=msg)

        # Finally settle the staging review on the request.
        self.set_review(request_id, project, state=review, msg=msg)
|
2014-02-12 16:28:52 +01:00
|
|
|
|
2014-02-28 14:32:02 +01:00
|
|
|
def create_package_container(self, project, package, disable_build=False):
|
2014-02-12 16:46:54 +01:00
|
|
|
"""
|
2014-02-13 13:27:29 +01:00
|
|
|
Creates a package container without any fields in project/package
|
|
|
|
:param project: project to create it
|
|
|
|
:param package: package name
|
2014-03-03 10:11:23 +01:00
|
|
|
:param disable_build: should the package be created with build
|
|
|
|
flag disabled
|
2014-02-13 13:27:29 +01:00
|
|
|
"""
|
2014-03-03 10:11:23 +01:00
|
|
|
dst_meta = '<package name="{}"><title/><description/></package>'
|
|
|
|
dst_meta = dst_meta.format(package)
|
2014-02-13 13:27:29 +01:00
|
|
|
if disable_build:
|
|
|
|
root = ET.fromstring(dst_meta)
|
|
|
|
elm = ET.SubElement(root, 'build')
|
|
|
|
ET.SubElement(elm, 'disable')
|
2014-02-13 14:18:16 +01:00
|
|
|
dst_meta = ET.tostring(root)
|
2014-02-13 13:27:29 +01:00
|
|
|
|
2014-02-28 14:32:02 +01:00
|
|
|
url = self.makeurl(['source', project, package, '_meta'])
|
2014-02-13 13:27:29 +01:00
|
|
|
http_PUT(url, data=dst_meta)
|
|
|
|
|
2014-03-04 15:20:05 +01:00
|
|
|
def check_ring_packages(self, project, requests):
|
|
|
|
"""
|
|
|
|
Checks if packages from requests are in some ring or not
|
|
|
|
:param project: project to check
|
|
|
|
:param requests: list of requests to verify
|
|
|
|
:return True (has ring packages) / False (has no ring packages)
|
|
|
|
"""
|
|
|
|
|
|
|
|
for request in requests:
|
|
|
|
pkg = self.get_package_for_request_id(project, request)
|
|
|
|
if pkg in self.ring_packages:
|
|
|
|
return True
|
|
|
|
|
|
|
|
return False
|
|
|
|
|
2014-07-31 14:16:28 +02:00
|
|
|
def check_project_status(self, project):
|
2014-02-17 12:03:07 +01:00
|
|
|
"""
|
2014-07-31 14:16:28 +02:00
|
|
|
Checks a staging project for acceptance. Use the JSON document
|
|
|
|
for staging project to base the decision.
|
2014-02-17 12:03:07 +01:00
|
|
|
:param project: project to check
|
2014-03-03 10:11:23 +01:00
|
|
|
:return true (ok)/false (empty prj) or list of strings with
|
|
|
|
informations)
|
2014-03-06 11:46:44 +01:00
|
|
|
|
2014-07-31 14:16:28 +02:00
|
|
|
"""
|
2015-02-19 10:57:55 +01:00
|
|
|
_prefix = '{}:'.format(self.cstaging)
|
2014-07-31 14:16:28 +02:00
|
|
|
if project.startswith(_prefix):
|
|
|
|
project = project.replace(_prefix, '')
|
2014-09-12 15:00:14 +02:00
|
|
|
|
|
|
|
query = {'format': 'json'}
|
2015-02-19 10:57:55 +01:00
|
|
|
url = self.makeurl(('project', 'staging_projects', self.project, project),
|
2014-09-12 15:00:14 +02:00
|
|
|
query=query)
|
2014-07-31 14:16:28 +02:00
|
|
|
result = json.load(self.retried_GET(url))
|
2015-09-30 05:47:53 +02:00
|
|
|
return result and result['overall_state'] == 'acceptable'
|
2014-05-08 11:57:37 +02:00
|
|
|
|
2014-03-03 14:54:27 +01:00
|
|
|
def days_since_last_freeze(self, project):
|
|
|
|
"""
|
|
|
|
Checks the last update for the frozen links
|
|
|
|
:param project: project to check
|
|
|
|
:return age in days(float) of the last update
|
|
|
|
"""
|
2014-06-04 16:56:56 +02:00
|
|
|
url = self.makeurl(['source', project, '_project'], {'meta': '1'})
|
|
|
|
root = ET.parse(http_GET(url)).getroot()
|
2014-03-03 14:54:27 +01:00
|
|
|
for entry in root.findall('entry'):
|
|
|
|
if entry.get('name') == '_frozenlinks':
|
|
|
|
return (time.time() - float(entry.get('mtime')))/3600/24
|
2014-03-05 13:55:00 +01:00
|
|
|
return 100000 # quite some!
|
2014-03-03 14:54:27 +01:00
|
|
|
|
2014-02-13 14:38:30 +01:00
|
|
|
    def rq_to_prj(self, request_id, project):
        """
        Links request to project - delete or submit

        :param request_id: request to link
        :param project: project to link into
        :raises oscerr.WrongArgs: when the request does not exist or is
                neither a submit nor a delete request
        :return: True on success
        """
        # read info from sr
        tar_pkg = None

        req = get_request(self.apiurl, str(request_id))
        if not req:
            raise oscerr.WrongArgs('Request {} not found'.format(request_id))

        # the request carries either a submit or a delete action; the
        # handler links/hides the package and returns its name
        act = req.get_actions('submit')
        if act:
            tar_pkg = self.submit_to_prj(act[0], project)

        act = req.get_actions('delete')
        if act:
            tar_pkg = self.delete_to_prj(act[0], project)

        if not tar_pkg:
            msg = 'Request {} is not a submit or delete request'
            msg = msg.format(request_id)
            raise oscerr.WrongArgs(msg)

        # register the package name
        self._add_rq_to_prj_pseudometa(project, int(request_id), tar_pkg)

        # add review
        self.add_review(request_id, project)

        # now remove the staging checker
        self.do_change_review_state(request_id, 'accepted',
                                    by_group=self.cstaging_group,
                                    message='Picked {}'.format(project))
        return True
|
2014-02-17 16:35:06 +01:00
|
|
|
|
2014-05-08 13:59:57 +02:00
|
|
|
def map_ring_package_to_subject(self, project, pkg):
|
|
|
|
"""
|
|
|
|
Returns the subproject (if any) to use for the pkg depending on the ring
|
|
|
|
the package is in
|
|
|
|
:param project the staging prj
|
|
|
|
:param pkg the package to add
|
|
|
|
"""
|
|
|
|
# it's actually a pretty stupid algorithm, but it might become more complex later
|
|
|
|
|
2014-06-18 21:52:08 +02:00
|
|
|
if project.endswith(':DVD'):
|
2014-07-07 16:59:40 +02:00
|
|
|
return project # not yet
|
2014-06-18 21:52:08 +02:00
|
|
|
|
2015-03-02 15:54:42 +08:00
|
|
|
ring_dvd = '{}:2-TestDVD'.format(self.crings)
|
2014-08-07 12:39:41 +02:00
|
|
|
if self.ring_packages.get(pkg) == ring_dvd:
|
2015-11-24 19:35:54 +08:00
|
|
|
if not self.item_exists(project + ":DVD") and self.item_exists(project, pkg):
|
|
|
|
# assuming it is in adi staging, workaround for https://progress.opensuse.org/issues/9646
|
|
|
|
return project
|
|
|
|
else:
|
|
|
|
return project + ":DVD"
|
2014-05-08 13:59:57 +02:00
|
|
|
|
|
|
|
return project
|
|
|
|
|
2014-08-20 15:36:19 +02:00
|
|
|
def get_sub_packages(self, pkg, project=None):
|
2014-06-17 13:49:15 +02:00
|
|
|
"""
|
2014-08-20 15:36:19 +02:00
|
|
|
Returns a list of packages that need to be linked into rings
|
|
|
|
too. A package is actually a tuple of project and package name
|
2014-06-17 13:49:15 +02:00
|
|
|
"""
|
|
|
|
ret = []
|
2014-08-20 15:36:19 +02:00
|
|
|
if not project:
|
|
|
|
project = self.ring_packages.get(pkg)
|
2014-06-17 13:49:15 +02:00
|
|
|
if not project:
|
|
|
|
return ret
|
|
|
|
url = self.makeurl(['source', project, pkg],
|
|
|
|
{'cmd': 'showlinked'})
|
|
|
|
|
|
|
|
# showlinked is a POST for rather bizzare reasons
|
|
|
|
f = http_POST(url)
|
|
|
|
root = ET.parse(f).getroot()
|
|
|
|
|
|
|
|
for pkg in root.findall('package'):
|
|
|
|
ret.append((pkg.get('project'), pkg.get('name')))
|
|
|
|
|
|
|
|
return ret
|
|
|
|
|
|
|
|
def create_and_wipe_package(self, project, package):
|
|
|
|
"""
|
|
|
|
Helper function for delete requests
|
|
|
|
"""
|
|
|
|
# create build disabled package
|
|
|
|
self.create_package_container(project, package, disable_build=True)
|
|
|
|
|
|
|
|
# now trigger wipebinaries to emulate a delete
|
|
|
|
url = self.makeurl(['build', project],
|
|
|
|
{'cmd': 'wipe', 'package': package})
|
|
|
|
http_POST(url)
|
|
|
|
|
2014-02-13 14:38:30 +01:00
|
|
|
def delete_to_prj(self, act, project):
|
|
|
|
"""
|
|
|
|
Hides Package in project
|
|
|
|
:param act: action for delete request
|
|
|
|
:param project: project to hide in
|
|
|
|
"""
|
|
|
|
|
|
|
|
tar_pkg = act.tgt_package
|
2014-05-08 13:59:57 +02:00
|
|
|
project = self.map_ring_package_to_subject(project, tar_pkg)
|
2014-06-17 13:49:15 +02:00
|
|
|
self.create_and_wipe_package(project, tar_pkg)
|
2014-02-13 14:38:30 +01:00
|
|
|
|
2014-06-17 13:49:15 +02:00
|
|
|
for sub_prj, sub_pkg in self.get_sub_packages(tar_pkg):
|
|
|
|
sub_prj = self.map_ring_package_to_subject(project, sub_pkg)
|
|
|
|
self.create_and_wipe_package(sub_prj, sub_pkg)
|
2014-02-13 14:38:30 +01:00
|
|
|
|
2014-06-17 13:57:19 +02:00
|
|
|
# create a link so unselect can find it
|
|
|
|
root = ET.Element('link', package=tar_pkg, project=project)
|
|
|
|
url = self.makeurl(['source', sub_prj, sub_pkg, '_link'])
|
|
|
|
http_PUT(url, data=ET.tostring(root))
|
|
|
|
|
2014-02-13 14:38:30 +01:00
|
|
|
return tar_pkg
|
|
|
|
|
2014-08-19 10:22:32 +02:00
|
|
|
    def submit_to_prj(self, act, project, force_enable_build=False):
        """
        Links sources from request to project

        :param act: action for submit request
        :param project: project to link into
        :param force_enable_build: overwrite the ring criteria to enable
               or disable the build
        :return: the target package name
        """

        src_prj = act.src_project
        src_rev = act.src_rev
        src_pkg = act.src_package
        tar_pkg = act.tgt_package

        disable_build = False
        # The force_enable_build will avoid the
        # map_ring_package_to_subproject
        if not force_enable_build:
            # non-ring packages in a regular (non-adi) staging stay build-disabled
            if self.crings and not self.ring_packages.get(tar_pkg) and not self.is_adi_project(project):
                disable_build = True
            else:
                project = self.map_ring_package_to_subject(project, tar_pkg)

        self.create_package_container(project, tar_pkg,
                                      disable_build=disable_build)

        # expand the revision to a md5
        url = self.makeurl(['source', src_prj, src_pkg],
                           {'rev': src_rev, 'expand': 1})
        f = http_GET(url)
        root = ET.parse(f).getroot()
        src_rev = root.attrib['srcmd5']
        src_vrev = root.attrib.get('vrev')

        # link stuff - not using linkpac because linkpac copies meta
        # from source
        root = ET.Element('link', package=src_pkg, project=src_prj,
                          rev=src_rev)
        if src_vrev:
            root.attrib['vrev'] = src_vrev
        url = self.makeurl(['source', project, tar_pkg, '_link'])
        http_PUT(url, data=ET.tostring(root))

        # mirror the link into the ring subprojects of any subpackages
        for sub_prj, sub_pkg in self.get_sub_packages(tar_pkg):
            sub_prj = self.map_ring_package_to_subject(project, sub_pkg)
            # print project, tar_pkg, sub_pkg, sub_prj
            if sub_prj == project:  # skip inner-project links
                continue
            self.create_package_container(sub_prj, sub_pkg)

            root = ET.Element('link', package=tar_pkg, project=project)
            url = self.makeurl(['source', sub_prj, sub_pkg, '_link'])
            http_PUT(url, data=ET.tostring(root))

        return tar_pkg
|
2014-02-17 12:03:07 +01:00
|
|
|
|
|
|
|
def prj_from_letter(self, letter):
|
2014-02-28 14:32:02 +01:00
|
|
|
if ':' in letter: # not a letter
|
2014-02-17 12:03:07 +01:00
|
|
|
return letter
|
2015-02-19 10:57:55 +01:00
|
|
|
return '{}:{}'.format(self.cstaging, letter)
|
2014-02-17 12:03:07 +01:00
|
|
|
|
2015-07-16 13:41:53 +02:00
|
|
|
def adi_prj_from_number(self, number):
|
|
|
|
if ':' in str(number):
|
|
|
|
return number
|
|
|
|
return '{}:adi:{}'.format(self.cstaging, number)
|
|
|
|
|
2014-02-17 12:03:07 +01:00
|
|
|
def list_requests_in_prj(self, project):
|
|
|
|
where = "@by_project='%s'+and+@state='new'" % project
|
|
|
|
|
2014-02-28 14:32:02 +01:00
|
|
|
url = self.makeurl(['search', 'request', 'id'],
|
|
|
|
"match=state/@name='review'+and+review[%s]" % where)
|
2014-02-17 12:03:07 +01:00
|
|
|
f = http_GET(url)
|
|
|
|
root = ET.parse(f).getroot()
|
|
|
|
list = []
|
|
|
|
for rq in root.findall('request'):
|
|
|
|
list.append(int(rq.get('id')))
|
|
|
|
|
|
|
|
return list
|
2014-02-17 14:41:13 +01:00
|
|
|
|
2014-02-20 14:22:33 +01:00
|
|
|
    def add_review(self, request_id, by_project=None, by_group=None, msg=None):
        """
        Adds review by project or group to the request

        Does nothing when an identical open review already exists or
        when the request is no longer alive.

        :param request_id: request to add review to
        :param by_project: project to assign review to
        :param by_group: group to assign review to
        :param msg: review message; a default is generated when omitted
        :raises oscerr.WrongArgs: when the request does not exist or
                neither a project nor a group was given
        """
        req = get_request(self.apiurl, str(request_id))
        if not req:
            raise oscerr.WrongArgs('Request {} not found'.format(request_id))
        # bail out early if the same review is already open
        for i in req.reviews:
            if by_project and i.by_project == by_project and i.state == 'new':
                return
            if by_group and i.by_group == by_group and i.state == 'new':
                return

        # don't try to change reviews if the request is dead
        if req.state.name not in ('new', 'review'):
            return

        query = {}
        if by_project:
            query['by_project'] = by_project
            if not msg:
                msg = 'Being evaluated by staging project "{}"'
                msg = msg.format(by_project)
        if by_group:
            query['by_group'] = by_group
            if not msg:
                msg = 'Being evaluated by group "{}"'.format(by_group)
        if not query:
            raise oscerr.WrongArgs('We need a group or a project')
        query['cmd'] = 'addreview'
        url = self.makeurl(['request', str(request_id)], query)
        http_POST(url, data=msg)
|
2014-02-17 14:41:13 +01:00
|
|
|
|
2014-03-06 11:43:36 +01:00
|
|
|
def set_review(self, request_id, project, state='accepted', msg=None):
|
2014-02-17 14:41:13 +01:00
|
|
|
"""
|
|
|
|
Sets review for request done by project
|
|
|
|
:param request_id: request to change review for
|
|
|
|
:param project: project to do the review
|
|
|
|
"""
|
|
|
|
req = get_request(self.apiurl, str(request_id))
|
|
|
|
if not req:
|
2014-02-28 14:32:02 +01:00
|
|
|
raise oscerr.WrongArgs('Request {} not found'.format(request_id))
|
2014-03-06 11:43:36 +01:00
|
|
|
# don't try to change reviews if the request is dead
|
2014-06-04 16:56:56 +02:00
|
|
|
if req.state.name not in ('new', 'review'):
|
2014-03-06 11:43:36 +01:00
|
|
|
return
|
2014-02-17 14:41:13 +01:00
|
|
|
cont = False
|
|
|
|
for i in req.reviews:
|
|
|
|
if i.by_project == project and i.state == 'new':
|
|
|
|
cont = True
|
2014-03-06 11:43:36 +01:00
|
|
|
if not cont:
|
|
|
|
return
|
|
|
|
if not msg:
|
2014-03-03 10:11:23 +01:00
|
|
|
msg = 'Reviewed by staging project "{}" with result: "{}"'
|
|
|
|
msg = msg.format(project, state)
|
2014-03-21 16:59:43 +01:00
|
|
|
self.do_change_review_state(request_id, state, by_project=project,
|
2014-06-04 16:56:56 +02:00
|
|
|
message=msg)
|
2014-02-19 11:48:16 +01:00
|
|
|
|
2015-08-28 14:49:15 +02:00
|
|
|
def get_flag_in_prj(self, project, flag='build', repository=None, arch=None):
|
|
|
|
"""Return the flag value in a project."""
|
|
|
|
url = self.makeurl(['source', project, '_meta'])
|
|
|
|
root = ET.parse(http_GET(url)).getroot()
|
|
|
|
section = root.find(flag)
|
|
|
|
for status in section:
|
|
|
|
is_repository = status.get('repository', None) == repository
|
|
|
|
is_arch = status.get('arch', None) == arch
|
|
|
|
if is_repository and is_arch:
|
|
|
|
return status.tag
|
|
|
|
|
2014-05-27 15:38:51 +02:00
|
|
|
def switch_flag_in_prj(self, project, flag='build', state='disable', repository=None, arch=None):
|
2014-03-06 18:20:35 +01:00
|
|
|
url = self.makeurl(['source', project, '_meta'])
|
2014-02-19 11:48:16 +01:00
|
|
|
prjmeta = ET.parse(http_GET(url)).getroot()
|
|
|
|
|
2014-05-27 15:38:51 +02:00
|
|
|
flagxml = prjmeta.find(flag)
|
2014-06-04 16:56:56 +02:00
|
|
|
if not flagxml: # appending is fine
|
2014-05-27 15:38:51 +02:00
|
|
|
flagxml = ET.SubElement(prjmeta, flag)
|
|
|
|
|
2014-02-19 11:48:16 +01:00
|
|
|
foundone = False
|
2014-05-27 15:38:51 +02:00
|
|
|
for build in flagxml:
|
|
|
|
if build.get('repository', None) == repository and build.get('arch', None) == arch:
|
2014-03-06 18:20:35 +01:00
|
|
|
build.tag = state
|
2014-02-19 11:48:16 +01:00
|
|
|
foundone = True
|
|
|
|
|
|
|
|
# need to add a global one
|
|
|
|
if not foundone:
|
2014-05-27 15:38:51 +02:00
|
|
|
query = {}
|
|
|
|
if arch:
|
|
|
|
query['arch'] = arch
|
|
|
|
if repository:
|
|
|
|
query['repository'] = repository
|
|
|
|
ET.SubElement(flagxml, state, query)
|
|
|
|
|
2014-02-19 11:48:16 +01:00
|
|
|
http_PUT(url, data=ET.tostring(prjmeta))
|
2014-03-03 17:18:18 +01:00
|
|
|
|
2014-05-27 15:38:51 +02:00
|
|
|
def build_switch_prj(self, project, state):
|
|
|
|
"""
|
|
|
|
Switch build state of project to desired state
|
|
|
|
:param project: project to switch state for
|
|
|
|
:param state: desired state for build
|
|
|
|
"""
|
|
|
|
self.switch_flag_in_prj(project, flag='build', state=state, repository=None, arch=None)
|
|
|
|
|
2014-03-03 17:18:18 +01:00
|
|
|
def prj_frozen_enough(self, project):
|
|
|
|
"""
|
|
|
|
Check if we can and should refreeze the prj"
|
|
|
|
:param project the project to check
|
|
|
|
:returns True if we can select into it
|
|
|
|
"""
|
|
|
|
|
|
|
|
data = self.get_prj_pseudometa(project)
|
|
|
|
if data['requests']:
|
2014-03-05 13:55:00 +01:00
|
|
|
return True # already has content
|
2014-03-03 17:18:18 +01:00
|
|
|
|
|
|
|
# young enough
|
|
|
|
if self.days_since_last_freeze(project) < 6.5:
|
|
|
|
return True
|
|
|
|
|
|
|
|
return False
|
2014-03-24 15:15:47 +01:00
|
|
|
|
|
|
|
def build_switch_staging_project(self, target_project):
|
|
|
|
"""
|
|
|
|
Verify what packages are in project and switch the build
|
|
|
|
accordingly.
|
|
|
|
:param target_project: project we validate and switch
|
|
|
|
"""
|
|
|
|
meta = self.get_prj_pseudometa(target_project)
|
|
|
|
staged_requests = list()
|
|
|
|
for request in meta['requests']:
|
|
|
|
staged_requests.append(request['id'])
|
2014-05-08 13:29:38 +02:00
|
|
|
target_flag = 'disable'
|
2015-07-27 11:13:23 +02:00
|
|
|
# for adi projects we always build
|
|
|
|
if self.is_adi_project(target_project):
|
|
|
|
target_flag = 'enable'
|
|
|
|
elif not self.crings or self.check_ring_packages(target_project, staged_requests):
|
2014-05-08 13:29:38 +02:00
|
|
|
target_flag = 'enable'
|
|
|
|
self.build_switch_prj(target_project, target_flag)
|
|
|
|
|
2015-01-19 21:41:02 +01:00
|
|
|
if self.item_exists(target_project + ":DVD"):
|
2014-05-08 13:29:38 +02:00
|
|
|
self.build_switch_prj(target_project + ":DVD", target_flag)
|
2014-06-04 16:33:11 +02:00
|
|
|
|
2015-01-19 21:41:02 +01:00
|
|
|
def item_exists(self, project, package=None):
|
2014-05-06 11:19:49 +02:00
|
|
|
"""
|
|
|
|
Return true if the given project exists
|
|
|
|
:param project: project name to check
|
2015-01-19 21:41:02 +01:00
|
|
|
:param package: optional package to check
|
2014-05-06 11:19:49 +02:00
|
|
|
"""
|
2015-01-19 21:41:02 +01:00
|
|
|
if package:
|
|
|
|
url = self.makeurl(['source', project, package, '_meta'])
|
|
|
|
else:
|
|
|
|
url = self.makeurl(['source', project, '_meta'])
|
2014-05-06 11:19:49 +02:00
|
|
|
try:
|
2014-06-04 16:33:11 +02:00
|
|
|
http_GET(url)
|
2014-05-06 11:19:49 +02:00
|
|
|
except urllib2.HTTPError:
|
|
|
|
return False
|
|
|
|
return True
|
2014-06-04 16:54:21 +02:00
|
|
|
|
2015-01-20 00:13:03 +01:00
|
|
|
def package_version(self, project, package):
|
|
|
|
"""
|
|
|
|
Return the version of a package, None in case the package does not exist
|
|
|
|
The first non-commented Version: tag found is used.
|
|
|
|
:param project: the project the package resides in
|
|
|
|
:param package: the package to check
|
|
|
|
:param product: if passed, the package to be checked is considered to be part of _product
|
|
|
|
"""
|
|
|
|
if not self.item_exists(project, package):
|
|
|
|
return None
|
|
|
|
|
|
|
|
version = None
|
|
|
|
|
2015-01-20 13:06:48 +01:00
|
|
|
specfile = self.load_file_content(project, package, '{}.spec'.format(package))
|
2015-01-20 00:13:03 +01:00
|
|
|
if specfile:
|
|
|
|
try:
|
2015-02-10 17:22:00 +01:00
|
|
|
version = re.findall('^Version:(.*)', specfile, re.MULTILINE)[0].strip()
|
2015-01-20 15:02:46 +01:00
|
|
|
except IndexError:
|
2015-01-20 00:13:03 +01:00
|
|
|
pass
|
|
|
|
return version
|
|
|
|
|
2015-11-26 11:20:36 +01:00
|
|
|
def get_binary_version(self, project, rpm, repository='standard', arch='x86_64'):
|
|
|
|
"""
|
|
|
|
Return the version of a built rpm file
|
|
|
|
"""
|
|
|
|
url = self.makeurl(['build', project, repository, arch, '_repository', "%s?view=fileinfo" % rpm])
|
2016-03-25 18:53:12 +08:00
|
|
|
try:
|
|
|
|
return ET.parse(http_GET(url)).getroot().find('version').text
|
|
|
|
except urllib2.HTTPError:
|
|
|
|
return None
|
2015-11-26 11:20:36 +01:00
|
|
|
|
2015-01-20 00:13:03 +01:00
|
|
|
def load_file_content(self, project, package, filename):
|
|
|
|
"""
|
|
|
|
Load the content of a file and return the content as data. If the package is a link, it will be expanded
|
|
|
|
:param project: The project to query
|
|
|
|
:param package: The package to quert
|
|
|
|
:param filename: The filename to query
|
|
|
|
"""
|
2015-01-20 13:06:48 +01:00
|
|
|
url = self.makeurl(['source', project, package, '{}?expand=1'.format(filename)])
|
2015-01-20 00:13:03 +01:00
|
|
|
try:
|
|
|
|
return http_GET(url).read()
|
2015-01-20 15:02:46 +01:00
|
|
|
except urllib2.HTTPError:
|
2015-01-20 00:13:03 +01:00
|
|
|
return None
|
|
|
|
|
|
|
|
def save_file_content(self, project, package, filename, content):
|
|
|
|
"""
|
|
|
|
Save content to a project/package/file
|
|
|
|
:param project: The project containing the package
|
|
|
|
:param package: the package to update
|
|
|
|
:param filename: the filename to save the data to
|
|
|
|
:param content: the content to write to the file
|
|
|
|
"""
|
2015-01-20 13:06:48 +01:00
|
|
|
url = self.makeurl(['source', project, package, filename])
|
2015-01-20 00:13:03 +01:00
|
|
|
http_PUT(url + '?comment=scripted+update', data=content)
|
2015-01-19 21:41:02 +01:00
|
|
|
|
2014-06-04 16:54:21 +02:00
|
|
|
    def update_status_comments(self, project, command):
        """
        Refresh the status comments, used for notification purposes, based on
        the current list of requests. To ensure that all involved users
        (and nobody else) get notified, old status comments are deleted and
        a new one is created.

        :param project: project name
        :param command: name of the command to include in the message
        """

        # TODO: we need to discuss the best way to keep track of status
        # comments. Right now they are marked with an initial markdown
        # comment. Maybe a cleaner approach would be to store something
        # like 'last_status_comment_id' in the pseudometa. But the current
        # OBS API for adding comments doesn't return the id of the created
        # comment.

        comment_api = CommentAPI(self.apiurl)

        # drop the previous status comment, recognised by its marker
        comments = comment_api.get_comments(project_name=project)
        for comment in comments.values():
            # TODO: update the comment removing the user mentions instead of
            # deleting the whole comment. But there is currently not call in
            # OBS API to update a comment
            if comment['comment'].startswith('<!--- osc staging'):
                comment_api.delete(comment['id'])
                break  # There can be only one! (if we keep deleting them)

        meta = self.get_prj_pseudometa(project)
        lines = ['<!--- osc staging %s --->' % command]
        lines.append('The list of requests tracked in %s has changed:\n' % project)
        for req in meta['requests']:
            # NOTE(review): the pseudometa key really appears to be spelled
            # 'autor' - confirm against the writer before "fixing" it;
            # missing entries fall back to asking OBS for the creator
            author = req.get('autor', None)
            if not author:
                # Old style metadata
                author = get_request(self.apiurl, str(req['id'])).get_creator()
            lines.append(' * Request#%s for package %s submitted by @%s' % (req['id'], req['package'], author))
        msg = '\n'.join(lines)
        comment_api.add_comment(project_name=project, comment=msg)
|
2014-07-17 13:55:45 +02:00
|
|
|
|
|
|
|
def mark_additional_packages(self, project, packages):
|
|
|
|
"""
|
|
|
|
Adds packages that the repo checker needs to download from staging prj
|
|
|
|
"""
|
|
|
|
meta = self.get_prj_pseudometa(project)
|
|
|
|
additionals = set(meta.get('add_to_repo', []))
|
|
|
|
additionals.update(packages)
|
|
|
|
meta['add_to_repo'] = sorted(additionals)
|
|
|
|
self.set_prj_pseudometa(project, meta)
|
2015-02-02 15:02:57 +01:00
|
|
|
|
|
|
|
def get_prj_results(self, prj, arch):
|
|
|
|
url = self.makeurl(['build', prj, 'standard', arch, "_jobhistory?code=lastfailures"])
|
|
|
|
results = []
|
|
|
|
|
|
|
|
root = ET.parse(http_GET(url)).getroot()
|
|
|
|
|
|
|
|
xmllines = root.findall("./jobhist")
|
|
|
|
|
|
|
|
for pkg in xmllines:
|
|
|
|
if pkg.attrib['code'] == 'failed':
|
|
|
|
results.append(pkg.attrib['package'])
|
|
|
|
|
|
|
|
return results
|
|
|
|
|
2015-11-26 11:20:36 +01:00
|
|
|
def is_repo_dirty(self, project, repository):
|
|
|
|
url = self.makeurl(['build', project, '_result?code=broken&repository=%s' % repository])
|
|
|
|
root = ET.parse(http_GET(url)).getroot()
|
|
|
|
for repo in root.findall('result'):
|
|
|
|
repostate = repo.get('state', 'missing')
|
|
|
|
if repostate not in ['unpublished', 'published'] or repo.get('dirty', 'false') == 'true':
|
|
|
|
return True
|
|
|
|
return False
|
|
|
|
|
2016-05-31 15:05:36 +02:00
|
|
|
def list_packages(self, project):
|
|
|
|
url = self.makeurl(['source', project])
|
2015-02-02 15:02:57 +01:00
|
|
|
pkglist = []
|
|
|
|
|
|
|
|
root = ET.parse(http_GET(url)).getroot()
|
|
|
|
xmllines = root.findall("./entry")
|
|
|
|
for pkg in xmllines:
|
2016-05-31 15:05:36 +02:00
|
|
|
pkglist.append(pkg.attrib['name'])
|
2015-02-02 15:02:57 +01:00
|
|
|
|
|
|
|
return pkglist
|
|
|
|
|
2016-05-31 15:05:36 +02:00
|
|
|
def check_pkgs(self, rebuild_list):
|
|
|
|
return list(set(rebuild_list) & set(self.list_packages(self.project)))
|
|
|
|
|
2015-02-10 17:22:00 +01:00
|
|
|
def rebuild_pkg(self, package, prj, arch, code=None):
|
|
|
|
query = {
|
|
|
|
'cmd': 'rebuild',
|
|
|
|
'arch': arch
|
|
|
|
}
|
2015-02-02 15:02:57 +01:00
|
|
|
if package:
|
|
|
|
query['package'] = package
|
|
|
|
pkg = query['package']
|
|
|
|
|
|
|
|
u = self.makeurl(['build', prj], query=query)
|
|
|
|
|
|
|
|
try:
|
|
|
|
print "tried to trigger rebuild for project '%s' package '%s'" % (prj, pkg)
|
2015-02-10 17:22:00 +01:00
|
|
|
http_POST(u)
|
2015-02-02 15:02:57 +01:00
|
|
|
except:
|
|
|
|
print "could not trigger rebuild for project '%s' package '%s'" % (prj, pkg)
|
2015-07-16 13:41:53 +02:00
|
|
|
|
|
|
|
|
|
|
|
def _candidate_adi_project(self):
|
|
|
|
"""Decide a candidate name for an ADI project."""
|
2015-07-19 09:32:32 +02:00
|
|
|
adi_projects = self.get_adi_projects()
|
2015-07-16 15:09:26 +02:00
|
|
|
adi_index = 1
|
2015-07-16 13:41:53 +02:00
|
|
|
for i, project in enumerate(adi_projects):
|
2015-07-16 15:09:26 +02:00
|
|
|
adi_index = i + 1
|
|
|
|
if not project.endswith(str(adi_index)):
|
|
|
|
return self.adi_prj_from_number(adi_index)
|
|
|
|
adi_index = i + 2
|
|
|
|
return self.adi_prj_from_number(adi_index)
|
2015-07-16 13:41:53 +02:00
|
|
|
|
|
|
|
    def create_adi_project(self, name):
        """Create an ADI project.

        :param name: number/suffix for the project; when falsy the next
                     free index is chosen automatically
        :raises Exception: when the project already exists
        :return: the full name of the created project
        """
        if not name:
            name = self._candidate_adi_project()
        else:
            name = self.adi_prj_from_number(name)

        adi_projects = self.get_adi_projects()
        if name in adi_projects:
            raise Exception('Project {} already exist'.format(name))

        meta = """
        <project name="{}">
        <title></title>
        <description></description>
        <publish>
          <disable/>
        </publish>
        <debuginfo>
          <enable/>
        </debuginfo>
        <repository name="standard">
          <path project="{}" repository="standard"/>
          <arch>x86_64</arch>
        </repository>
        </project>""".format(name, self.project)
        url = make_meta_url('prj', name, self.apiurl)
        http_PUT(url, data=meta)
        # put twice because on first put, the API adds useless maintainer
        http_PUT(url, data=meta)

        return name
|
2015-08-06 13:48:07 +02:00
|
|
|
|
|
|
|
def is_user_member_of(self, user, group):
|
|
|
|
root = ET.fromstring(get_group(self.apiurl, group))
|
|
|
|
|
|
|
|
if root.findall("./person/person[@userid='%s']" % user):
|
|
|
|
return True
|
|
|
|
else:
|
|
|
|
return False
|