# -*- coding: utf-8 -*-
#
# (C) 2014 mhrusecky@suse.cz, openSUSE.org
# (C) 2014 tchvatal@suse.cz, openSUSE.org
# Distribute under GPLv2 or GPLv3

import json
import logging
import re
import urllib2
import time

from xml.etree import cElementTree as ET

import yaml

from osc import oscerr
from osc.core import change_review_state
from osc.core import delete_package
from osc.core import get_request
from osc.core import make_meta_url
from osc.core import makeurl
from osc.core import metafile
from osc.core import http_GET
from osc.core import http_POST
from osc.core import http_PUT

from osclib.comments import CommentAPI


class StagingAPI(object):
    """
    Class containing various api calls to work with staging projects.
    """

    def __init__(self, apiurl):
        """
        Initialize instance variables
        """

        self.apiurl = apiurl
        self.rings = (
            'openSUSE:Factory:Rings:0-Bootstrap',
            'openSUSE:Factory:Rings:1-MinimalX',
            'openSUSE:Factory:Rings:2-TestDVD'
        )
        self.ring_packages = self._generate_ring_packages()
        self.packages_staged = self._get_staged_requests()
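
    # Illustrative usage sketch (assumes a reachable OBS instance and osc
    # credentials already configured; project names are examples only):
    #
    #   api = StagingAPI('https://api.opensuse.org')
    #   api.get_staging_projects()
    #   # ['openSUSE:Factory:Staging:A', 'openSUSE:Factory:Staging:B', ...]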

    def makeurl(self, l, query=None):
        """
        Wrapper around osc's makeurl passing our apiurl
        :return url made for l and query
        """
        query = [] if not query else query
        return makeurl(self.apiurl, l, query)
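
    # For example (illustrative), self.makeurl(['source', 'openSUSE:Factory'],
    # {'view': 'info'}) would yield something like
    # https://api.opensuse.org/source/openSUSE:Factory?view=info,
    # depending on the configured apiurl.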

    def retried_GET(self, url):
        try:
            return http_GET(url)
        except urllib2.HTTPError, e:
            if e.code / 100 == 5:
                print 'Retrying {}'.format(url)
                return self.retried_GET(url)
            raise e

    def retried_POST(self, url):
        try:
            return http_POST(url)
        except urllib2.HTTPError, e:
            if e.code == 504:
                print 'Timeout on {}'.format(url)
                return '<status code="timeout"/>'
            if e.code / 100 == 5:
                print 'Retrying {}'.format(url)
                return self.retried_POST(url)
            raise e

    def retried_PUT(self, url, data):
        try:
            return http_PUT(url, data=data)
        except urllib2.HTTPError, e:
            if e.code / 100 == 5:
                print 'Retrying {}'.format(url)
                return self.retried_PUT(url, data)
            raise e

    def _generate_ring_packages(self):
        """
        Generate dictionary with names of the rings
        :return dictionary with ring names
        """

        ret = {}

        for prj in self.rings:
            url = self.makeurl(['source', prj])
            root = http_GET(url)
            for entry in ET.parse(root).getroot().findall('entry'):
                pkg = entry.attrib['name']
                if pkg in ret and pkg != 'Test-DVD-x86_64':
                    msg = '{} is defined in two projects ({} and {})'
                    raise Exception(msg.format(pkg, ret[pkg], prj))
                ret[pkg] = prj
        return ret
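
    # The resulting mapping looks roughly like this (package names are
    # examples only):
    #
    #   {'bash': 'openSUSE:Factory:Rings:0-Bootstrap',
    #    'xorg-x11-server': 'openSUSE:Factory:Rings:1-MinimalX',
    #    ...}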

    def _get_staged_requests(self):
        """
        Get all requests that are already staged
        :return dict of staged requests with their project and srid
        """

        packages_staged = dict()
        for prj in self.get_staging_projects():
            meta = self.get_prj_pseudometa(prj)
            for req in meta['requests']:
                packages_staged[req['package']] = {'prj': prj, 'rq_id': req['id']}

        return packages_staged
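
    # Example of the returned structure (values are illustrative):
    #
    #   {'vim': {'prj': 'openSUSE:Factory:Staging:B', 'rq_id': 123456}}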

    def get_package_information(self, project, pkgname):
        """
        Get the revision, package name and source project to copy from,
        based on the content provided
        :param project: the project we have the package in
        :param pkgname: name of the package we want to identify
        :return dict ( project, package, revision, md5sum )
        """

        package_info = {}

        url = self.makeurl(['source', project, pkgname])
        content = http_GET(url)
        root = ET.parse(content).getroot()
        package_info['dir_srcmd5'] = root.attrib['srcmd5']

        linkinfo = root.find('linkinfo')
        package_info['srcmd5'] = linkinfo.attrib['srcmd5']
        package_info['rev'] = linkinfo.attrib.get('rev', None)
        package_info['project'] = linkinfo.attrib['project']
        package_info['package'] = linkinfo.attrib['package']

        return package_info
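
    # A returned dict might look like this (values are illustrative):
    #
    #   {'dir_srcmd5': 'd41d8cd9...', 'srcmd5': 'a3cca2b2...',
    #    'rev': '7', 'project': 'openSUSE:Factory', 'package': 'vim'}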

    def move_between_project(self, source_project, req_id,
                             destination_project):
        """
        Move selected package from one staging project to another
        :param source_project: source project
        :param req_id: request to move
        :param destination_project: destination project
        """

        # Get the relevant information about source
        meta = self.get_prj_pseudometa(source_project)
        found = False
        for req in meta['requests']:
            if int(req['id']) == int(req_id):
                found = True
                break
        if not found:
            return None

        # Copy the package
        self.rq_to_prj(req_id, destination_project)
        # Delete the old one
        self.rm_from_prj(source_project, request_id=req_id,
                         msg='Moved to {}'.format(destination_project))

        # Build disable the old project if empty
        self.build_switch_staging_project(source_project)

        return True

    def get_staging_projects(self):
        """
        Get all currently running staging projects
        :return list of known staging projects
        """

        projects = []

        query = "id?match=starts-with(@name,'openSUSE:Factory:Staging:')"
        url = self.makeurl(['search', 'project', query])
        projxml = http_GET(url)
        root = ET.parse(projxml).getroot()
        for val in root.findall('project'):
            projects.append(val.get('name'))
        return projects

    def do_change_review_state(self, request_id, newstate, message=None,
                               by_group=None, by_user=None, by_project=None):
        """
        Change review state of the staging request
        :param request_id: id of the request
        :param newstate: state of the new request
        :param message: message for the review
        :param by_group, by_user, by_project: review type
        """

        message = '' if not message else message

        req = get_request(self.apiurl, str(request_id))
        if not req:
            raise oscerr.WrongArgs('Request {} not found'.format(request_id))

        for review in req.reviews:
            if review.by_group == by_group and \
               review.by_user == by_user and \
               review.by_project == by_project and \
               review.state == 'new':

                # call osc's function
                return change_review_state(self.apiurl, str(request_id),
                                           newstate,
                                           message=message,
                                           by_group=by_group,
                                           by_user=by_user,
                                           by_project=by_project)

        return False

    def accept_non_ring_request(self, request):
        """
        Accept review of requests that are not yet in any ring so we
        don't delay their testing.
        :param request: request to check
        """

        # Consolidate all data from request
        request_id = int(request.get('id'))
        action = request.findall('action')
        if not action:
            msg = 'Request {} has no action'.format(request_id)
            raise oscerr.WrongArgs(msg)
        # we care only about first action
        action = action[0]

        # Where are we targeting the package
        target_project = action.find('target').get('project')
        target_package = action.find('target').get('package')

        # If the values are empty it is no error
        if not target_project or not target_package:
            msg = 'no target/package in request {}, action {}; '
            msg = msg.format(request_id, action)
            logging.info(msg)

        # Verify the package ring
        ring = self.ring_packages.get(target_package, None)
        if not ring:
            # accept the request here
            message = 'No need for staging, not in tested ring projects.'
            self.do_change_review_state(request_id, 'accepted', message=message,
                                        by_group='factory-staging')

    def supseded_request(self, request):
        """
        Returns staging info for a request if it supersedes a request that
        is already tracked, otherwise None
        :param request - a Request instance
        :return dict with 'prj' and 'rq_id' of the old request
        """
        # Consolidate all data from request
        request_id = int(request.get('id'))
        action = request.findall('action')
        if not action:
            msg = 'Request {} has no action'.format(request_id)
            raise oscerr.WrongArgs(msg)
        # we care only about first action
        action = action[0]

        # Where are we targeting the package
        target_project = action.find('target').get('project')
        target_package = action.find('target').get('package')

        # If the values are empty it is no error
        if not target_project or not target_package:
            msg = 'no target/package in request {}, action {}; '
            msg = msg.format(request_id, action)
            logging.info(msg)

        # If the package is currently tracked then we do the replacement
        stage_info = self.packages_staged.get(target_package, {'prj': '', 'rq_id': 0})
        if stage_info['rq_id'] != 0 and int(stage_info['rq_id']) != request_id:
            return stage_info
        return None

    def update_superseded_request(self, request):
        """
        Replace superseded requests that are already in some
        staging prj
        :param request: request we are checking if it is fine
        """

        stage_info = self.supseded_request(request)
        request_id = int(request.get('id'))

        if stage_info:
            # Remove the old request
            self.rm_from_prj(stage_info['prj'],
                             request_id=stage_info['rq_id'],
                             msg='Replaced by newer request',
                             review='declined')
            # Add the new one that should be replacing it
            self.rq_to_prj(request_id, stage_info['prj'])

    def get_open_requests(self):
        """
        Get all requests with open review for staging project
        that are not yet included in any staging project
        :return list of pending open review requests
        """

        requests = []

        # xpath query, using the -m, -r, -s options
        where = "@by_group='factory-staging'+and+@state='new'"

        query = "match=state/@name='review'+and+review[{}]".format(where)
        url = self.makeurl(['search', 'request'], query)
        f = http_GET(url)
        root = ET.parse(f).getroot()

        for rq in root.findall('request'):
            requests.append(rq)
        return requests

    def dispatch_open_requests(self):
        """
        Verify all requests and dispatch them to staging projects or
        approve them
        """

        # get all current pending requests
        requests = self.get_open_requests()
        # check if we can reduce it down by accepting some
        for rq in requests:
            self.accept_non_ring_request(rq)
            self.update_superseded_request(rq)

    def get_prj_pseudometa(self, project):
        """
        Gets project data from YAML in project description
        :param project: project to read data from
        :return structured object with metadata
        """

        url = make_meta_url('prj', project, self.apiurl)
        f = http_GET(url)
        root = ET.parse(f).getroot()
        description = root.find('description')
        # If YAML parsing fails, load default
        # FIXME: Better handling of errors
        # * broken description
        # * directly linked packages
        # * removed linked packages
        try:
            data = yaml.load(description.text)
            data['requests']
        except:
            data = yaml.load('requests: []')
        return data
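
    # The pseudometa stored in the project description is plain YAML; an
    # illustrative example of what get_prj_pseudometa() returns after
    # parsing (names and ids are made up):
    #
    #   requests:
    #   - {author: someuser, id: 123456, package: vim}
    #   - {author: otheruser, id: 123789, package: emacs}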

    def set_prj_pseudometa(self, project, meta):
        """
        Sets project description to the YAML of the provided object
        :param project: project to save into
        :param meta: data to save
        """

        # Get current metadata
        url = make_meta_url('prj', project, self.apiurl)
        root = ET.parse(http_GET(url)).getroot()
        # Find description
        description = root.find('description')
        # Order the requests and replace it with yaml
        meta['requests'] = sorted(meta['requests'], key=lambda x: x['id'])
        description.text = yaml.dump(meta)
        # Find title
        title = root.find('title')
        # Put something nice into title as well
        new_title = []
        for request in meta['requests']:
            new_title.append(request['package'])
        nt = ', '.join(sorted(new_title))
        title.text = nt[:240]
        # Write XML back
        url = make_meta_url('prj', project, self.apiurl, force=True)
        f = metafile(url, ET.tostring(root))
        http_PUT(f.url, file=f.filename)

    def _add_rq_to_prj_pseudometa(self, project, request_id, package):
        """
        Records request as part of the project within metadata
        :param project: project to record into
        :param request_id: request id to record
        :param package: package the request is about
        """

        data = self.get_prj_pseudometa(project)
        append = True
        for request in data['requests']:
            if request['package'] == package:
                # Only update if needed (to save calls to get_request)
                if request['id'] != request_id or not request.get('author'):
                    request['id'] = request_id
                    request['author'] = get_request(self.apiurl, str(request_id)).get_creator()
                append = False
        if append:
            author = get_request(self.apiurl, str(request_id)).get_creator()
            data['requests'].append({'id': request_id, 'package': package, 'author': author})
        self.set_prj_pseudometa(project, data)

    def get_request_id_for_package(self, project, package):
        """
        Query the request id from meta
        :param project: project the package is in
        :param package: package we want to query for
        """
        data = self.get_prj_pseudometa(project)
        for x in data['requests']:
            if x['package'] == package:
                return int(x['id'])
        return None

    def get_package_for_request_id(self, project, request_id):
        """
        Query the package name for a request id from meta
        :param project: project the package is in
        :param request_id: request id we want to query for
        """
        data = self.get_prj_pseudometa(project)
        request_id = int(request_id)
        for x in data['requests']:
            if x['id'] == request_id:
                return x['package']
        return None

    def _remove_package_from_prj_pseudometa(self, project, package):
        """
        Delete request from the project pseudometa
        :param project: project to remove from
        :param package: package we want to remove from meta
        """

        data = self.get_prj_pseudometa(project)
        data['requests'] = filter(lambda x: x['package'] != package, data['requests'])
        self.set_prj_pseudometa(project, data)

    def rm_from_prj(self, project, package=None, request_id=None,
                    msg=None, review='accepted'):
        """
        Delete request from the project
        :param project: project to remove from
        :param request_id: request we want to remove
        :param msg: message for the log
        :param review: review state for the review, default accepted
        """

        if not request_id:
            request_id = self.get_request_id_for_package(project, package)
        if not package:
            package = self.get_package_for_request_id(project, request_id)
        if not package or not request_id:
            return

        self._remove_package_from_prj_pseudometa(project, package)
        subprj = self.map_ring_package_to_subject(project, package)
        delete_package(self.apiurl, subprj, package, force=True, msg=msg)
        self.set_review(request_id, project, state=review, msg=msg)

    def create_package_container(self, project, package, disable_build=False):
        """
        Creates a package container without any fields in project/package
        :param project: project to create it in
        :param package: package name
        :param disable_build: should the package be created with build
                              flag disabled
        """
        dst_meta = '<package name="{}"><title/><description/></package>'
        dst_meta = dst_meta.format(package)
        if disable_build:
            root = ET.fromstring(dst_meta)
            elm = ET.SubElement(root, 'build')
            ET.SubElement(elm, 'disable')
            dst_meta = ET.tostring(root)

        url = self.makeurl(['source', project, package, '_meta'])
        http_PUT(url, data=dst_meta)
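
    # With disable_build=True the generated package _meta looks roughly like
    # this (the package name is an example only):
    #
    #   <package name="vim"><title/><description/>
    #     <build><disable/></build>
    #   </package>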

    def check_one_request(self, request, project):
        """
        Check if a staging request is ready to be approved. Reviews
        for the project are ignored, other open reviews will block
        the acceptance.
        :param project: staging project
        :param request: request id to check
        """

        url = self.makeurl(['request', str(request)])
        root = ET.parse(http_GET(url)).getroot()

        # relevant info for printing
        package = str(root.find('action').find('target').attrib['package'])

        state = root.find('state').get('name')
        if state in ['declined', 'superseded', 'revoked']:
            return '{}: {}'.format(package, state)

        # instead of just printing the state of the whole request find
        # out who is remaining on the review and print it out,
        # otherwise print out that it is ready for approval and
        # waiting on others from GR to be accepted
        review_state = root.findall('review')
        failing_groups = []
        for i in review_state:
            if i.attrib['state'] == 'accepted':
                continue
            if i.get('by_project', None) == project:
                continue
            for attrib in ['by_group', 'by_user', 'by_project', 'by_package']:
                value = i.get(attrib, None)
                if value:
                    failing_groups.append(value)

        if not failing_groups:
            return None
        else:
            state = 'missing reviews: ' + ', '.join(failing_groups)
            return '{}: {}'.format(package, state)
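
    # Typical return values (package and reviewer names are illustrative):
    #
    #   'vim: declined'
    #   'vim: missing reviews: opensuse-review-team, legal-auto'
    #   None  (nothing blocks acceptance)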

    def check_ring_packages(self, project, requests):
        """
        Checks if packages from requests are in some ring or not
        :param project: project to check
        :param requests: list of requests to verify
        :return True (has ring packages) / False (has no ring packages)
        """

        for request in requests:
            pkg = self.get_package_for_request_id(project, request)
            if pkg in self.ring_packages:
                return True

        return False

    def check_project_status(self, project, verbose=False):
        """
        Checks a staging project for acceptance. Checks all open
        requests for open reviews and build status
        :param project: project to check
        :return True (ok) / False (empty project) or list of strings
                with information
        """

        # Report
        report = list()

        # First ensure we dispatched the open requests so we do not
        # pass projects with update/superseded requests
        for request in self.get_open_requests():
            stage_info = self.supseded_request(request)
            if stage_info and stage_info['prj'] == project:
                return ['Request {} is superseded'.format(stage_info['rq_id'])]

        # all requests with open review
        requests = self.list_requests_in_prj(project)
        open_requests = set(requests)

        # all tracked requests - some of them might be declined, so we
        # don't see them above
        meta = self.get_prj_pseudometa(project)
        for req in meta['requests']:
            req = req['id']
            if req in open_requests:
                open_requests.remove(req)
            if req not in requests:
                requests.append(req)
        if open_requests:
            return ['Request(s) {} are not tracked but are open for the prj'.format(','.join(map(str, open_requests)))]

        # If we find no requests in staging then it is empty so we ignore it
        if not requests:
            return False

        # Check if the requests are acceptable and bail out on
        # first failure unless verbose as it is slow
        for request in requests:
            ret = self.check_one_request(request, project)
            if ret:
                report.append(ret)
                if not verbose:
                    break

        # Check the build/openQA only if we have some ring packages
        if self.check_ring_packages(project, requests):
            # Check the buildstatus
            buildstatus = self.gather_build_status(project)
            if buildstatus:
                # here no append as we are adding list to list
                report += self.generate_build_status_details(buildstatus, verbose)
            # Check the openqa state
            ret = self.find_openqa_state(project)
            if ret:
                report.append(ret)

        if report:
            return report
        elif not self.project_exists(project + ":DVD"):
            # The only case we are green
            return True

        # now check the same for the subprj
        project = project + ":DVD"
        buildstatus = self.gather_build_status(project)

        if buildstatus:
            report += self.generate_build_status_details(buildstatus, verbose)

        # Check the openqa state
        ret = self.find_openqa_state(project)
        if ret:
            report.append(ret)

        if report:
            return report

        return True

    def days_since_last_freeze(self, project):
        """
        Checks the last update for the frozen links
        :param project: project to check
        :return age in days (float) of the last update
        """
        url = self.makeurl(['source', project, '_project'], {'meta': '1'})
        root = ET.parse(http_GET(url)).getroot()
        for entry in root.findall('entry'):
            if entry.get('name') == '_frozenlinks':
                return (time.time() - float(entry.get('mtime'))) / 3600 / 24
        return 100000  # quite some!

    def find_openqa_jobs(self, project):
        """
        Find the best matching openQA jobs for the Test-DVD ISO built in
        the given staging project
        :param project: project to check
        :return dict of openQA jobs keyed by job name, or None
        """
        url = self.makeurl(['build', project, 'images', 'x86_64', 'Test-DVD-x86_64'])
        root = ET.parse(http_GET(url)).getroot()

        filename = None
        for binary in root.findall('binary'):
            filename = binary.get('filename', '')
            if filename.endswith('.iso'):
                break

        if not filename:
            return None

        jobtemplate = '-Staging'
        if project.endswith(':DVD'):
            jobtemplate = '-Staging2'
            project = project[:-4]

        jobname = 'openSUSE-Staging:'
        jobname += project.split(':')[-1] + jobtemplate
        result = re.match('Test-Build([^-]+)-Media.iso', filename)
        jobname += '-DVD-x86_64-Build' + result.group(1) + "-Media.iso"

        try:
            url = "https://openqa.opensuse.org/api/v1/jobs?iso={}".format(jobname)
            f = urllib2.urlopen(url)
        except urllib2.HTTPError:
            return None

        jobs = json.load(f)['jobs']

        bestjobs = {}
        for job in jobs:
            if job['result'] != 'incomplete' and not job['clone_id']:
                if job['test'] == 'miniuefi':
                    continue
                if job['name'] not in bestjobs or bestjobs[job['name']]['result'] != 'passed':
                    bestjobs[job['name']] = job

        return bestjobs
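
    # For example, for staging project openSUSE:Factory:Staging:B with an
    # ISO named Test-Build1234-Media.iso, the queried openQA ISO name would
    # be (build number illustrative):
    #
    #   openSUSE-Staging:B-Staging-DVD-x86_64-Build1234-Media.iso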

    def find_openqa_state(self, project):
        """
        Checks the openQA state of the project
        :param project: project to check
        :return None or a string describing the issue
        """

        jobs = self.find_openqa_jobs(project)

        if not jobs:
            return 'No openQA result yet'

        for job in jobs.values():
            check = self.check_if_job_is_ok(job)
            if check:
                return check

        return None

    def check_if_job_is_ok(self, job):
        """
        Check a single openQA job result
        :param job: openQA job dict as returned by find_openqa_jobs
        :return None if the job is fine, otherwise a string describing
                the problem
        """
        url = 'https://openqa.opensuse.org/tests/{}/file/results.json'.format(job['id'])
        try:
            f = urllib2.urlopen(url)
        except urllib2.HTTPError:
            return "Can't open {}".format(url)
        try:
            openqa = json.load(f)
        except ValueError:
            return "Can't decode {}".format(url)

        overall = openqa.get('overall', 'inprogress')
        if job['test'] == 'miniuefi':
            return None  # ignore
        # pprint.pprint(openqa)
        # pprint.pprint(job)
        if overall != 'ok':
            return "openQA's overall status is {} for https://openqa.opensuse.org/tests/{}".format(overall, job['id'])

        for module in openqa['testmodules']:
            # zypper_in fails at the moment - urgent fix needed
            if module['result'] == 'ok':
                continue
            if module['name'] in []:
                continue
            return '{} test failed: https://openqa.opensuse.org/tests/{}'.format(module['name'], job['id'])
        return None

    def gather_build_status(self, project):
        """
        Checks whether everything is built in the project
        :param project: project to check
        """
        # Get build results
        url = self.makeurl(['build', project, '_result?code=failed&code=broken&code=unresolvable'])
        root = ET.parse(http_GET(url)).getroot()

        # Check them
        broken = []
        working = []
        # Iterate through repositories
        for results in root.findall('result'):
            building = False
            if results.get('state') not in ('published', 'unpublished') or results.get('dirty') == 'true':
                working.append({
                    'path': '{}/{}'.format(results.get('repository'),
                                           results.get('arch')),
                    'state': results.get('state')
                })
                building = True
            # Iterate through packages
            for node in results:
                # Find broken
                result = node.get('code')
                if result in ('broken', 'failed') or (result == 'unresolvable' and not building):
                    broken.append({
                        'pkg': node.get('package'),
                        'state': result,
                        'path': '{}/{}'.format(results.get('repository'),
                                               results.get('arch'))
                    })

        # Return the results
        if not working and not broken:
            return None
        else:
            return [project, working, broken]
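
    # An illustrative return value (project, still-building repositories,
    # broken packages):
    #
    #   ['openSUSE:Factory:Staging:B',
    #    [{'path': 'standard/x86_64', 'state': 'building'}],
    #    [{'pkg': 'vim', 'state': 'failed', 'path': 'standard/x86_64'}]]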

    def generate_build_status_details(self, details, verbose=False):
        """
        Generate list of strings for the buildstatus detail report.
        :param details: buildstatus information about the project
        :return list of strings for printing
        """

        retval = list()
        if not isinstance(details, list):
            return retval
        project, working, broken = details

        if working:
            retval.append('At least the following repositories are still building:')
            for i in working:
                retval.append('  {}: {}'.format(i['path'], i['state']))
                if not verbose:
                    break
        if broken:
            retval.append('The following packages are broken:')
            for i in broken:
                retval.append('  {} ({}): {}'.format(i['pkg'], i['path'],
                                                     i['state']))

        return retval

    def rq_to_prj(self, request_id, project):
        """
        Links request to project - delete or submit
        :param request_id: request to link
        :param project: project to link into
        """
        # read info from sr
        tar_pkg = None

        req = get_request(self.apiurl, str(request_id))
        if not req:
            raise oscerr.WrongArgs('Request {} not found'.format(request_id))

        act = req.get_actions('submit')
        if act:
            tar_pkg = self.submit_to_prj(act[0], project)

        act = req.get_actions('delete')
        if act:
            tar_pkg = self.delete_to_prj(act[0], project)

        if not tar_pkg:
            msg = 'Request {} is not a submit or delete request'
            msg = msg.format(request_id)
            raise oscerr.WrongArgs(msg)

        # register the package name
        self._add_rq_to_prj_pseudometa(project, int(request_id), tar_pkg)

        # add review
        self.add_review(request_id, project)

        # now remove the staging checker
        self.do_change_review_state(request_id, 'accepted',
                                    by_group='factory-staging',
                                    message='Picked {}'.format(project))
        return True

    def map_ring_package_to_subject(self, project, pkg):
        """
        Returns the subproject (if any) to use for the pkg depending on
        the ring the package is in
        :param project the staging prj
        :param pkg the package to add
        """
        # it's actually a pretty stupid algorithm, but it might become more complex later

        if self.ring_packages.get(pkg) == 'openSUSE:Factory:Rings:2-TestDVD':
            return project + ":DVD"

        return project
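
    # For example (illustrative), a Test-DVD ring package staged into
    # openSUSE:Factory:Staging:B is mapped to openSUSE:Factory:Staging:B:DVD,
    # while any other package stays in openSUSE:Factory:Staging:B.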

    def delete_to_prj(self, act, project):
        """
        Hides Package in project
        :param act: action for delete request
        :param project: project to hide in
        """

        tar_pkg = act.tgt_package
        project = self.map_ring_package_to_subject(project, tar_pkg)

        # create build disabled package
        self.create_package_container(project, tar_pkg, disable_build=True)
        # now trigger wipebinaries to emulate a delete
        url = self.makeurl(['build', project],
                           {'cmd': 'wipe', 'package': tar_pkg})
        http_POST(url)

        return tar_pkg

    def submit_to_prj(self, act, project):
        """
        Links sources from request to project
        :param act: action for submit request
        :param project: project to link into
        """

        src_prj = act.src_project
        src_rev = act.src_rev
        src_pkg = act.src_package
        tar_pkg = act.tgt_package

        disable_build = False
        if not self.ring_packages.get(tar_pkg):
            disable_build = True
        else:
            project = self.map_ring_package_to_subject(project, tar_pkg)

        self.create_package_container(project, tar_pkg,
                                      disable_build=disable_build)

        # if it's disabled anyway, it doesn't make a difference if we link or not
        if disable_build:
            return tar_pkg

        # expand the revision to a md5
        url = self.makeurl(['source', src_prj, src_pkg],
                           {'rev': src_rev, 'expand': 1})
        f = http_GET(url)
        root = ET.parse(f).getroot()
        src_rev = root.attrib['srcmd5']
        src_vrev = root.attrib.get('vrev')

        # link stuff - not using linkpac because linkpac copies meta
        # from source
        root = ET.Element('link', package=src_pkg, project=src_prj,
                          rev=src_rev)
        if src_vrev:
            root.attrib['vrev'] = src_vrev
        url = self.makeurl(['source', project, tar_pkg, '_link'])
        http_PUT(url, data=ET.tostring(root))
        return tar_pkg
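
    # The generated _link file looks roughly like this (values are
    # illustrative):
    #
    #   <link package="vim" project="home:someuser:branches:openSUSE:Factory"
    #         rev="a3cca2b2aa1e3b5b3b5aad99a8529074" vrev="3" />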

    def prj_from_letter(self, letter):
        """
        Expand a single staging letter into the full project name
        :param letter: staging letter (or an already full project name)
        """
        if ':' in letter:  # not a letter
            return letter
        return 'openSUSE:Factory:Staging:%s' % letter

    def list_requests_in_prj(self, project):
        """
        List ids of requests that have an open review for the given project
        :param project: project to list requests for
        """
        where = "@by_project='%s'+and+@state='new'" % project

        url = self.makeurl(['search', 'request', 'id'],
                           "match=state/@name='review'+and+review[%s]" % where)
        f = http_GET(url)
        root = ET.parse(f).getroot()
        list = []
        for rq in root.findall('request'):
            list.append(int(rq.get('id')))

        return list

    def add_review(self, request_id, by_project=None, by_group=None, msg=None):
        """
        Adds review by project or group to the request
        :param request_id: request to add review to
        :param by_project: project to assign review to
        :param by_group: group to assign review to
        """
        req = get_request(self.apiurl, str(request_id))
        if not req:
            raise oscerr.WrongArgs('Request {} not found'.format(request_id))
        for i in req.reviews:
            if by_project and i.by_project == by_project and i.state == 'new':
                return
            if by_group and i.by_group == by_group and i.state == 'new':
                return

        # don't try to change reviews if the request is dead
        if req.state.name not in ('new', 'review'):
            return

        query = {}
        if by_project:
            query['by_project'] = by_project
            if not msg:
                msg = 'Being evaluated by staging project "{}"'
                msg = msg.format(by_project)
        if by_group:
            query['by_group'] = by_group
            if not msg:
                msg = 'Being evaluated by group "{}"'.format(by_group)
        if not query:
            raise oscerr.WrongArgs('We need a group or a project')
        query['cmd'] = 'addreview'
        url = self.makeurl(['request', str(request_id)], query)
        http_POST(url, data=msg)

    def set_review(self, request_id, project, state='accepted', msg=None):
        """
        Sets review for request done by project
        :param request_id: request to change review for
        :param project: project to do the review
        """
        req = get_request(self.apiurl, str(request_id))
        if not req:
            raise oscerr.WrongArgs('Request {} not found'.format(request_id))
        # don't try to change reviews if the request is dead
        if req.state.name not in ('new', 'review'):
            return
        cont = False
        for i in req.reviews:
            if i.by_project == project and i.state == 'new':
                cont = True
        if not cont:
            return
        if not msg:
            msg = 'Reviewed by staging project "{}" with result: "{}"'
            msg = msg.format(project, state)
        self.do_change_review_state(request_id, state, by_project=project,
                                    message=msg)

    def switch_flag_in_prj(self, project, flag='build', state='disable', repository=None, arch=None):
        """
        Switch the given flag in the project meta to the desired state
        :param project: project to change
        :param flag: flag name, e.g. 'build'
        :param state: target state, e.g. 'enable' or 'disable'
        :param repository: optional repository to restrict the flag to
        :param arch: optional architecture to restrict the flag to
        """
        url = self.makeurl(['source', project, '_meta'])
        prjmeta = ET.parse(http_GET(url)).getroot()

        flagxml = prjmeta.find(flag)
        if not flagxml:  # appending is fine
            flagxml = ET.SubElement(prjmeta, flag)

        foundone = False
        for build in flagxml:
            if build.get('repository', None) == repository and build.get('arch', None) == arch:
                build.tag = state
                foundone = True

        # need to add a global one
        if not foundone:
            query = {}
            if arch:
                query['arch'] = arch
            if repository:
                query['repository'] = repository
            ET.SubElement(flagxml, state, query)

        http_PUT(url, data=ET.tostring(prjmeta))
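
    # After switch_flag_in_prj(prj, flag='build', state='disable') the
    # project _meta contains roughly the following fragment (illustrative):
    #
    #   <build>
    #     <disable/>
    #   </build>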

    def build_switch_prj(self, project, state):
        """
        Switch build state of project to desired state
        :param project: project to switch state for
        :param state: desired state for build
        """
        self.switch_flag_in_prj(project, flag='build', state=state, repository=None, arch=None)

    def prj_frozen_enough(self, project):
        """
        Check if we can and should refreeze the prj
        :param project the project to check
        :returns True if we can select into it
        """

        data = self.get_prj_pseudometa(project)
        if data['requests']:
            return True  # already has content

        # young enough
        if self.days_since_last_freeze(project) < 6.5:
            return True

        return False

    def build_switch_staging_project(self, target_project):
        """
        Verify what packages are in project and switch the build
        accordingly.
        :param target_project: project we validate and switch
        """
        meta = self.get_prj_pseudometa(target_project)
        staged_requests = list()
        for request in meta['requests']:
            staged_requests.append(request['id'])
        target_flag = 'disable'
        if self.check_ring_packages(target_project, staged_requests):
            target_flag = 'enable'
        self.build_switch_prj(target_project, target_flag)

        if self.project_exists(target_project + ":DVD"):
            self.build_switch_prj(target_project + ":DVD", target_flag)

    def project_exists(self, project):
        """
        Return true if the given project exists
        :param project: project name to check
        """
        url = self.makeurl(['source', project, '_meta'])
        try:
            http_GET(url)
        except urllib2.HTTPError:
            return False
        return True

    def update_status_comments(self, project, command):
        """
        Refresh the status comments, used for notification purposes, based on
        the current list of requests. To ensure that all involved users
        (and nobody else) get notified, old status comments are deleted and
        a new one is created.
        :param project: project name
        :param command: name of the command to include in the message
        """

        # TODO: we need to discuss the best way to keep track of status
        # comments. Right now they are marked with an initial markdown
        # comment. Maybe a cleaner approach would be to store something
        # like 'last_status_comment_id' in the pseudometa. But the current
        # OBS API for adding comments doesn't return the id of the created
        # comment.

        comment_api = CommentAPI(self.apiurl)

        comments = comment_api.get_comments(project_name=project)
        for comment in comments.values():
            # TODO: update the comment removing the user mentions instead of
            # deleting the whole comment. But there is currently no call in
            # the OBS API to update a comment
            if comment['comment'].startswith('<!--- osc staging'):
                comment_api.delete(comment['id'])
                break  # There can be only one! (if we keep deleting them)

        meta = self.get_prj_pseudometa(project)
        lines = ['<!--- osc staging %s --->' % command]
        lines.append('The list of requests tracked in %s has changed:\n' % project)
        for req in meta['requests']:
            author = req.get('author', None)
            if not author:
                # Old style metadata
                author = get_request(self.apiurl, str(req['id'])).get_creator()
            lines.append(' * Request#%s for package %s submitted by @%s' % (req['id'], req['package'], author))
        msg = '\n'.join(lines)
        comment_api.add_comment(project_name=project, comment=msg)
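
    # The generated comment body looks roughly like this (command, project,
    # ids and names are illustrative):
    #
    #   <!--- osc staging select --->
    #   The list of requests tracked in openSUSE:Factory:Staging:B has changed:
    #
    #    * Request#123456 for package vim submitted by @someuser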
|