#
# (C) 2011 coolo@suse.de, Novell Inc, openSUSE.org
# Distribute under GPLv2 or GPLv3
#
# Copy this script to ~/.osc-plugins/ or /var/lib/osc-plugins .
# Then try to run 'osc check_repo --help' to see the usage.

import os
import re
import shutil
import subprocess
import tempfile
from urllib import quote_plus
import urllib2
import sys

from xml.etree import cElementTree as ET

from osc import oscerr
from osc import cmdln

from osc.core import get_binary_file
from osc.core import get_buildinfo
from osc.core import http_GET
from osc.core import http_POST
from osc.core import makeurl
from osc.core import Request

# Expand sys.path to search modules inside the plugin directory
_plugin_dir = os.path.expanduser('~/.osc-plugins')
sys.path.append(_plugin_dir)

from osclib.checkrepo import CheckRepo
from osclib.cycle import CycleDetector
from osclib.memoize import memoize, CACHEDIR


# Directory where downloaded binary packages are stored.
DOWNLOADS = os.path.expanduser('~/co/downloads')


#
# XXX - Ugly Hack. Because of the way that osc imports plugins, we need
# to declare some functions and objects used in the decorator as global
#
global tempfile
global wraps

global build
global last_build_success
global jobhistory
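

# build() returns the raw XML listing the binaries built for a package,
# or None on HTTP error.  Both helpers below are @memoize()d, so repeated
# queries for the same argument tuple are served from the local cache.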
@memoize()
def build(apiurl, project, repo, arch, package):
    root = None
    try:
        url = makeurl(apiurl, ['build', project, repo, arch, package])
        root = http_GET(url).read()
    except urllib2.HTTPError, e:
        print('ERROR in URL %s [%s]' % (url, e))
    return root
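

# last_build_success() asks OBS for the "lastsuccess" result of the
# submitted revision against the target project; again the raw XML is
# returned, or None on HTTP error.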
@memoize()
def last_build_success(apiurl, src_project, tgt_project, src_package, rev):
    root = None
    try:
        url = makeurl(apiurl,
                      ['build', src_project,
                       '_result?lastsuccess&package=%s&pathproject=%s&srcmd5=%s' % (
                           quote_plus(src_package),
                           quote_plus(tgt_project),
                           rev)])
        root = http_GET(url).read()
    except urllib2.HTTPError, e:
        print('ERROR in URL %s [%s]' % (url, e))
    return root


def get_project_repos(apiurl, src_project, tgt_project, src_package, rev):
    """Read the repositories (and their arch results) of the project."""
    # XXX TODO - Shitty logic here. A better proposal is to refactor
    # _check_repo_buildsuccess.
    repos = []
    url = makeurl(apiurl,
                  ['build', src_project,
                   '_result?lastsuccess&package=%s&pathproject=%s&srcmd5=%s' % (
                       quote_plus(src_package),
                       quote_plus(tgt_project),
                       rev)])
    try:
        root = ET.parse(http_GET(url)).getroot()
        for element in root.findall('repository'):
            archs = [(e.get('arch'), e.get('result')) for e in element.findall('arch')]
            repos.append((element.get('name'), archs))
    except urllib2.HTTPError, e:
        print('ERROR in URL %s [%s]' % (url, e))
    return repos
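

# old_md5() walks the job history of the first repository/arch that built
# the package and maps every old srcmd5 to the srcmd5 of its link target,
# so a later submission can be matched against previously built sources.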
def old_md5(apiurl, src_project, tgt_project, src_package, rev):
    """Collect the old MD5s for a package."""
    # XXX TODO - instead of fixing the limit, use endtime to make
    # sure that we have the correct time frame.
    limit = 20
    query = {
        'package': src_package,
        # 'code': 'succeeded',
        'limit': limit,
    }

    repositories = get_project_repos(apiurl, src_project, tgt_project,
                                     src_package, rev)

    srcmd5_list = []
    for repository, archs in repositories:
        for arch, status in archs:
            if srcmd5_list:
                break
            if status not in ('succeeded', 'outdated'):
                continue

            url = makeurl(apiurl, ['build', src_project, repository, arch, '_jobhistory'],
                          query=query)
            try:
                root = ET.parse(http_GET(url)).getroot()
                srcmd5_list = [e.get('srcmd5') for e in root.findall('jobhist')]
            except urllib2.HTTPError, e:
                print('ERROR in URL %s [%s]' % (url, e))

    md5_set = set()
    for srcmd5 in srcmd5_list:
        query = {
            'expand': 1,
            'rev': srcmd5,
        }
        url = makeurl(apiurl, ['source', src_project, src_package], query=query)
        root = ET.parse(http_GET(url)).getroot()
        md5_set.add(root.find('linkinfo').get('srcmd5'))

    return md5_set


def _check_repo_change_review_state(self, opts, id_, newstate, message='', supersed=None):
    """Taken from osc/osc/core.py, improved:
       - verbose option added,
       - empty by_user=& removed,
       - the numeric id can be an int().
    """
    query = {
        'cmd': 'changereviewstate',
        'newstate': newstate,
        'by_user': 'factory-repo-checker',
    }
    if supersed:
        query['superseded_by'] = supersed
    # if message:
    #     query['comment'] = message

    code = 404
    url = makeurl(opts.apiurl, ['request', str(id_)], query=query)
    try:
        f = http_POST(url, data=message)
        root = ET.parse(f).getroot()
        code = root.attrib['code']
    except urllib2.HTTPError, e:
        print('ERROR in URL %s [%s]' % (url, e))
    return code


def _check_repo_find_submit_request(self, opts, project, package):
    xpath = "(action/target/@project='%s' and "\
            "action/target/@package='%s' and "\
            "action/@type='submit' and "\
            "(state/@name='new' or state/@name='review' or "\
            "state/@name='accepted'))" % (project, package)
    try:
        url = makeurl(opts.apiurl, ['search', 'request'], 'match=%s' % quote_plus(xpath))
        f = http_GET(url)
        collection = ET.parse(f).getroot()
    except urllib2.HTTPError, e:
        print('ERROR in URL %s [%s]' % (url, e))
        return None
    for root in collection.findall('request'):
        r = Request()
        r.read(root)
        return int(r.reqid)
    return None
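

# A package listed as "missing" is harmless if it builds no binaries at
# all in the source project for that repo/arch.  Returns True when pkg
# can be ignored, False when it really provides binaries.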
def _check_repo_avoid_wrong_friends(self, prj, repo, arch, pkg, opts):
    xml = build(opts.apiurl, prj, repo, arch, pkg)
    if xml:
        root = ET.fromstring(xml)
        for binary in root.findall('binary'):
            # if there are binaries, we're out
            return False
    return True
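

# Parse one request XML element into CheckRepoPackage objects: one for
# the main package and one for every extra .spec file shipped in the
# same source package.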
def _check_repo_one_request(self, rq, opts):

    class CheckRepoPackage:
        def __repr__(self):
            return '[%d:%s/%s]' % (int(self.request), self.sproject, self.spackage)

        def __init__(self):
            self.updated = False
            self.error = None
            self.build_excluded = False

    id_ = int(rq.get('id'))
    actions = rq.findall('action')
    if len(actions) > 1:
        msg = 'only one action per request is supported - create a group instead: '\
              'https://github.com/SUSE/hackweek/wiki/Improved-Factory-devel-project-submission-workflow'
        print('DECLINED', msg)
        self._check_repo_change_review_state(opts, id_, 'declined', message=msg)
        return []

    act = actions[0]
    type_ = act.get('type')
    if type_ != 'submit':
        msg = 'Unchecked request type %s' % type_
        print 'ACCEPTED', msg
        self._check_repo_change_review_state(opts, id_, 'accepted', message=msg)
        return []

    pkg = act.find('source').get('package')
    prj = act.find('source').get('project')
    rev = act.find('source').get('rev')
    tprj = act.find('target').get('project')
    tpkg = act.find('target').get('package')

    subm_id = 'SUBMIT(%d):' % id_
    print '%s %s/%s -> %s/%s' % (subm_id, prj, pkg, tprj, tpkg)

    packs = []
    p = CheckRepoPackage()
    p.spackage = pkg
    p.sproject = prj
    p.tpackage = tpkg
    p.tproject = tprj
    p.group = opts.grouped.get(id_, id_)
    p.request = id_

    # Get source information about the SR:
    #   - Source MD5
    #   - Entries (.tar.gz, .changes, .spec ...) and MD5
    try:
        url = makeurl(opts.apiurl, ['source', prj, pkg, '?expand=1&rev=%s' % rev])
        root = ET.parse(http_GET(url)).getroot()
    except urllib2.HTTPError, e:
        print 'ERROR in URL %s [%s]' % (url, e)
        return []
    p.rev = root.attrib['srcmd5']

    # Recover the .spec files
    specs = [en.attrib['name'][:-5] for en in root.findall('entry')
             if en.attrib['name'].endswith('.spec')]

    # source checker validated it exists
    specs.remove(tpkg)
    packs.append(p)

    # Validate the rest of the spec files
    for spec in specs:
        lprj, lpkg, lmd5 = '', '', ''
        try:
            url = makeurl(opts.apiurl, ['source', prj, spec, '?expand=1'])
            root = ET.parse(http_GET(url)).getroot()
            link = root.find('linkinfo')
            if link is not None:
                lprj = link.attrib.get('project', '')
                lpkg = link.attrib.get('package', '')
                lmd5 = link.attrib['srcmd5']
        except urllib2.HTTPError:
            pass  # leave lprj empty

        if (lprj != prj or lpkg != pkg) and not p.updated:
            msg = '%s/%s should _link to %s/%s' % (prj, spec, prj, pkg)
            print 'DECLINED', msg
            self._check_repo_change_review_state(opts, id_, 'declined', message=msg)
            p.updated = True

        if lmd5 != p.rev and not p.updated:
            if lmd5 not in old_md5(opts.apiurl, lprj, p.tproject, spec, p.rev):
                msg = '%s/%s is a link but has a different md5sum than %s?' % (prj, spec, pkg)
            else:
                msg = '%s is no longer the submitted version, please resubmit HEAD' % spec
            print '[DECLINED] CHECK MANUALLY', msg
            # self._check_repo_change_review_state(opts, id_, 'declined', message=msg)
            p.updated = True

        sp = CheckRepoPackage()
        sp.spackage = spec
        sp.sproject = prj
        sp.tpackage = spec
        sp.tproject = tprj
        sp.group = p.group
        sp.request = id_
        packs.append(sp)
        sp.rev = root.attrib['srcmd5']
    return packs
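

# Decide from the build results whether a request can progress:
#   - a repo with both i586 and x86_64 fully succeeded -> True
#   - disabled everywhere or outdated sources          -> decline
#   - still building or failed somewhere               -> keep in review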
def _check_repo_buildsuccess(self, p, opts):
    root_xml = last_build_success(opts.apiurl, p.sproject, p.tproject, p.spackage, p.rev)
    if not root_xml:
        return False
    root = ET.fromstring(root_xml)
    if not root:
        return False
    if 'code' in root.attrib:
        print ET.tostring(root)
        return False

    result = False
    p.goodrepos = []
    missings = {}
    alldisabled = True
    foundbuilding = None
    foundfailed = None

    tocheckrepos = []
    for repo in root.findall('repository'):
        archs = [a.attrib['arch'] for a in repo.findall('arch')]
        foundarchs = len([a for a in archs if a in ('i586', 'x86_64')])
        if foundarchs == 2:
            tocheckrepos.append(repo)

    if not tocheckrepos:
        msg = 'Missing i586 and x86_64 in the repo list'
        print msg
        self._check_repo_change_review_state(opts, p.request, 'new', message=msg)
        # Next line not needed, but for documentation
        p.updated = True
        return False

    for repo in tocheckrepos:
        isgood = True
        founddisabled = False
        r_foundbuilding = None
        r_foundfailed = None
        r_missings = {}
        for arch in repo.findall('arch'):
            if arch.attrib['arch'] not in ('i586', 'x86_64'):
                continue
            if 'missing' in arch.attrib:
                for pkg in arch.attrib['missing'].split(','):
                    if not self._check_repo_avoid_wrong_friends(p.sproject, repo.attrib['name'], arch.attrib['arch'], pkg, opts):
                        missings[pkg] = 1
            if arch.attrib['result'] not in ('succeeded', 'excluded'):
                isgood = False
            if arch.attrib['result'] == 'excluded' and arch.attrib['arch'] == 'x86_64':
                p.build_excluded = True
            if arch.attrib['result'] == 'disabled':
                founddisabled = True
            if arch.attrib['result'] == 'failed' or arch.attrib['result'] == 'unknown':
                # Sometimes an unknown status is equivalent to
                # disabled, but we map it as failed to have a human
                # check (no autoreject)
                r_foundfailed = repo.attrib['name']
            if arch.attrib['result'] == 'building':
                r_foundbuilding = repo.attrib['name']
            if arch.attrib['result'] == 'outdated':
                msg = "%s's sources were changed after submission and the old sources never built. Please resubmit" % p.spackage
                print 'DECLINED', msg
                self._check_repo_change_review_state(opts, p.request, 'declined', message=msg)
                # Next line is not needed, but for documentation
                p.updated = True
                return False

        r_missings = r_missings.keys()
        for pkg in r_missings:
            missings[pkg] = 1
        if not founddisabled:
            alldisabled = False
        if isgood:
            p.goodrepos.append(repo.attrib['name'])
            result = True
        if r_foundbuilding:
            foundbuilding = r_foundbuilding
        if r_foundfailed:
            foundfailed = r_foundfailed

    p.missings = sorted(missings)

    if result:
        return True

    if alldisabled:
        msg = '%s is disabled or does not build against factory. Please fix and resubmit' % p.spackage
        print 'DECLINED', msg
        self._check_repo_change_review_state(opts, p.request, 'declined', message=msg)
        # Next line not needed, but for documentation
        p.updated = True
        return False
    if foundbuilding:
        msg = '%s is still building for repository %s' % (p.spackage, foundbuilding)
        print msg
        self._check_repo_change_review_state(opts, p.request, 'new', message=msg)
        # Next line not needed, but for documentation
        p.updated = True
        return False
    if foundfailed:
        msg = '%s failed to build in repository %s - not accepting' % (p.spackage, foundfailed)
        # failures might be temporary, so don't autoreject but wait for a human to check
        print msg
        self._check_repo_change_review_state(opts, p.request, 'new', message=msg)
        # Next line not needed, but for documentation
        p.updated = True
        return False

    return True
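

# List the binaries of a package in a repo/arch as tuples of
# (filename, package name, arch suffix, mtime).  debuginfo, debugsource
# and src rpms are skipped; rpmlint.log is kept so it gets fetched too.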
def _check_repo_repo_list(self, prj, repo, arch, pkg, opts, ignore=False):
    url = makeurl(opts.apiurl, ['build', prj, repo, arch, pkg])
    files = []
    try:
        binaries = ET.parse(http_GET(url)).getroot()
        for bin_ in binaries.findall('binary'):
            fn = bin_.attrib['filename']
            mt = int(bin_.attrib['mtime'])
            result = re.match(r'(.*)-([^-]*)-([^-]*)\.([^-\.]+)\.rpm', fn)
            if not result:
                if fn == 'rpmlint.log':
                    files.append((fn, '', '', mt))
                continue
            pname = result.group(1)
            if pname.endswith('-debuginfo') or pname.endswith('-debuginfo-32bit'):
                continue
            if pname.endswith('-debugsource'):
                continue
            if result.group(4) == 'src':
                continue
            files.append((fn, pname, result.group(4), mt))
    except urllib2.HTTPError:
        pass
        # if not ignore:
        #     print 'ERROR in URL %s [%s]' % (url, e)
    return files


def _check_repo_get_binary(self, apiurl, prj, repo, arch, package, file, target, mtime):
    if os.path.exists(target):
        # we need to check the mtime too as the file might get updated
        cur = os.path.getmtime(target)
        if cur > mtime:
            return
    get_binary_file(apiurl, prj, repo, arch, file, package=package, target_filename=target)


def _get_verifymd5(self, p, rev):
    try:
        url = makeurl(self.get_api_url(), ['source', p.sproject, p.spackage, '?view=info&rev=%s' % rev])
        root = ET.parse(http_GET(url)).getroot()
    except urllib2.HTTPError, e:
        print 'ERROR in URL %s [%s]' % (url, e)
        return []
    return root.attrib['verifymd5']
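

# The DISTURL stamped into a built rpm starts with the srcmd5 of the
# sources it was built from, so comparing it with the request revision
# (directly, or via verifymd5 for expanded links) shows whether the
# binary matches the submitted sources.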
def _checker_compare_disturl(self, disturl, p):
    distmd5 = os.path.basename(disturl).split('-')[0]
    if distmd5 == p.rev:
        return True

    vrev1 = self._get_verifymd5(p, p.rev)
    vrev2 = self._get_verifymd5(p, distmd5)
    if vrev1 == vrev2:
        return True
    print 'ERROR Revision mismatch: %s, %s' % (vrev1, vrev2)
    return False
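

# Download the x86_64 binaries of every good repo into
# DOWNLOADS/<package>/<repo> and verify the DISTURL of each rpm against
# the request revision.  Returns the set of package names already built
# in the target project, which repo-checker should ignore.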
def _check_repo_download(self, p, opts):
    p.downloads = dict()

    if p.build_excluded:
        return set()

    for repo in p.goodrepos:
        # we can assume x86_64 is there
        todownload = []
        for fn in self._check_repo_repo_list(p.sproject, repo, 'x86_64', p.spackage, opts):
            todownload.append(('x86_64', fn[0], fn[3]))

        # now fetch -32bit packs
        # for fn in self._check_repo_repo_list(p.sproject, repo, 'i586', p.spackage, opts):
        #     if fn[2] == 'x86_64':
        #         todownload.append(('i586', fn[0], fn[3]))

        p.downloads[repo] = []
        for arch, fn, mt in todownload:
            repodir = os.path.join(DOWNLOADS, p.spackage, repo)
            if not os.path.exists(repodir):
                os.makedirs(repodir)
            t = os.path.join(repodir, fn)
            self._check_repo_get_binary(opts.apiurl, p.sproject, repo,
                                        arch, p.spackage, fn, t, mt)
            p.downloads[repo].append(t)
            if fn.endswith('.rpm'):
                pid = subprocess.Popen(['rpm', '--nosignature', '--queryformat', '%{DISTURL}', '-qp', t],
                                       stdout=subprocess.PIPE, close_fds=True)
                os.waitpid(pid.pid, 0)[1]
                disturl = pid.stdout.readlines()[0]

                if not self._checker_compare_disturl(disturl, p):
                    p.error = '[%s] %s does not match revision %s' % (p, disturl, p.rev)
                    return set()

    toignore = set()
    for fn in self._check_repo_repo_list(p.tproject, 'standard', 'x86_64', p.tpackage, opts, ignore=True):
        toignore.add(fn[1])

    # now fetch -32bit pack list
    for fn in self._check_repo_repo_list(p.tproject, 'standard', 'i586', p.tpackage, opts, ignore=True):
        if fn[2] == 'x86_64':
            toignore.add(fn[1])
    return toignore


def _get_buildinfo(self, opts, prj, repo, arch, pkg):
    """Get the build info for a package"""
    xml = get_buildinfo(opts.apiurl, prj, pkg, repo, arch)
    root = ET.fromstring(xml)
    return [e.attrib['name'] for e in root.findall('bdep')]
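

# Process one group of requests end to end: check build success, download
# the binaries, detect dependency cycles, complain about dependencies not
# yet in the target project, and finally run repo-checker.pl over the
# assembled repository to accept or hold every request of the group.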
def _check_repo_group(self, id_, reqs, opts):
    print '\nCheck group', reqs
    if not all(self._check_repo_buildsuccess(r, opts) for r in reqs):
        return

    # all succeeded
    toignore = set()
    destdir = os.path.expanduser('~/co/%s' % str(reqs[0].group))
    fetched = dict((r, False) for r in opts.groups.get(id_, []))
    packs = []

    for p in reqs:
        i = self._check_repo_download(p, opts)
        if p.error:
            if not p.updated:
                print p.error
                self._check_repo_change_review_state(opts, p.request, 'new', message=p.error)
                p.updated = True
            else:
                print p.error
            return
        toignore.update(i)
        fetched[p.request] = True
        packs.append(p)

    for req, f in fetched.items():
        if not f:
            packs.extend(self._check_repo_fetch_request(req, opts))
    for p in packs:
        if fetched[p.request]:
            continue
        # we need to call it to fetch the good repos to download
        # but the return value is of no interest right now
        self._check_repo_buildsuccess(p, opts)
        i = self._check_repo_download(p, opts)
        if p.error:
            print 'ERROR (ALREADY ACCEPTED?):', p.error
            p.updated = True
        toignore.update(i)

    # Detect cycles in the current Factory graph after we update the
    # links with the current list of requests.
    cycle_detector = CycleDetector(opts.apiurl)
    cycle_detector.cycles(packs)

    for p in reqs:
        smissing = []
        for package in p.missings:
            alreadyin = False
            # print package, packs
            for t in packs:
                if package == t.tpackage:
                    alreadyin = True
            if alreadyin:
                continue
            # print package, packs, downloads, toignore
            request = self._check_repo_find_submit_request(opts, p.tproject, package)
            if request:
                greqs = opts.groups.get(p.group, [])
                if request in greqs:
                    continue
                package = '%s(rq%s)' % (package, request)
            smissing.append(package)
        if len(smissing):
            msg = 'Please make sure to wait until these dependencies are in %s: %s' % (p.tproject, ', '.join(smissing))
            if not p.updated:
                self._check_repo_change_review_state(opts, p.request, 'new', message=msg)
                print msg
                p.updated = True
            else:
                print msg
            return

    # Create a temporary file for the params
    params_file = tempfile.NamedTemporaryFile(delete=False)
    params_file.write('\n'.join(f for f in toignore if f.strip()))
    params_file.close()

    reposets = []

    if len(packs) == 1:
        p = packs[0]
        for r in p.downloads.keys():
            reposets.append([(p, r, p.downloads[r])])
    else:
        # TODO: for groups we just pick the first repo - we'd need to create a smart
        # matrix
        dirstolink = []
        for p in packs:
            keys = p.downloads.keys()
            if not keys:
                continue
            r = keys[0]
            dirstolink.append((p, r, p.downloads[r]))
        reposets.append(dirstolink)

    if len(reposets) == 0:
        print 'NO REPOS'
        return

    for dirstolink in reposets:
        if os.path.exists(destdir):
            shutil.rmtree(destdir)
        os.makedirs(destdir)
        for p, repo, downloads in dirstolink:
            dir = destdir + '/%s' % p.tpackage
            for d in downloads:
                if not os.path.exists(dir):
                    os.mkdir(dir)
                os.symlink(d, os.path.join(dir, os.path.basename(d)))

        repochecker = os.path.join(self.repocheckerdir, 'repo-checker.pl')
        civs = "LC_ALL=C perl %s '%s' -r %s -f %s" % (repochecker, destdir, self.repodir, params_file.name)
        # print civs
        # continue
        # exit(1)
        p = subprocess.Popen(civs, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True)
        # ret = os.waitpid(p.pid, 0)[1]
        stdoutdata, stderrdata = p.communicate()
        ret = p.returncode
        # print ret, stdoutdata, stderrdata
        if not ret:  # skip the others
            for p, repo, downloads in dirstolink:
                p.goodrepo = repo
            break
    os.unlink(params_file.name)

    updated = {}

    if ret:
        # print stdoutdata, set(map(lambda x: x.request, reqs))

        for p in reqs:
            if updated.get(p.request, False) or p.updated:
                continue
            print stdoutdata
            self._check_repo_change_review_state(opts, p.request, 'new', message=stdoutdata)
            p.updated = True
            updated[p.request] = 1
        return
    for p in reqs:
        if updated.get(p.request, False) or p.updated:
            continue
        msg = 'Builds for repo %s' % p.goodrepo
        print 'ACCEPTED', msg
        self._check_repo_change_review_state(opts, p.request, 'accepted', message=msg)
        p.updated = True
        updated[p.request] = 1
    shutil.rmtree(destdir)


def _check_repo_fetch_request(self, id_, opts):
    url = makeurl(opts.apiurl, ['request', str(id_)])
    root = ET.parse(http_GET(url)).getroot()
    return self._check_repo_one_request(root, opts)
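

# Entry point of the plugin.  Typical invocations (illustrative):
#   osc check_repo                # check all pending factory-repo-checker reviews
#   osc check_repo 123456         # check the given request only
#   osc check_repo --skip 123456  # accept the review without checking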
@cmdln.alias('check', 'cr')
@cmdln.option('-s', '--skip', action='store_true', help='skip review')
def do_check_repo(self, subcmd, opts, *args):
    """${cmd_name}: Checker review of submit requests.

    Usage:
       ${cmd_name} [SRID]...
           Shows pending review requests and their current state.
    ${cmd_option_list}
    """

    opts.mode = ''
    opts.verbose = False
    opts.apiurl = self.get_api_url()

    checkrepo = CheckRepo(opts.apiurl)

    # XXX TODO - Remove this when all access to opts.group[s|ed] comes
    # from checkrepo.
    opts.grouped = checkrepo.grouped
    opts.groups = checkrepo.groups

    if opts.skip:
        if not len(args):
            raise oscerr.WrongArgs('Provide #IDs to skip.')

        for id_ in args:
            msg = 'skip review'
            print 'ACCEPTED', msg
            self._check_repo_change_review_state(opts, id_, 'accepted', message=msg)
        return

    ids = [arg for arg in args if arg.isdigit()]

    packs = []
    if not ids:
        # xpath query, using the -m, -r, -s options
        where = "@by_user='factory-repo-checker'+and+@state='new'"
        url = makeurl(opts.apiurl, ['search', 'request'],
                      "match=state/@name='review'+and+review[%s]" % where)
        f = http_GET(url)
        root = ET.parse(f).getroot()
        for rq in root.findall('request'):
            packs.extend(self._check_repo_one_request(rq, opts))
    else:
        # we have a list, use them.
        for id_ in ids:
            packs.extend(self._check_repo_fetch_request(id_, opts))

    # Order the packs before grouping
    packs = sorted(packs, key=lambda p: p.request, reverse=True)

    groups = {}
    for p in packs:
        a = groups.get(p.group, [])
        a.append(p)
        groups[p.group] = a

    self.repocheckerdir = os.path.dirname(os.path.realpath(os.path.expanduser('~/.osc-plugins/osc-check_repo.py')))
    self.repodir = "%s/repo-%s-%s-x86_64" % (CACHEDIR, 'openSUSE:Factory', 'standard')
    if not os.path.exists(self.repodir):
        os.mkdir(self.repodir)
    # Mirror the full standard repository of openSUSE:Factory locally, so
    # repo-checker.pl can resolve dependencies against it.
    civs = 'LC_ALL=C perl %s/bs_mirrorfull --nodebug https://build.opensuse.org/build/%s/%s/x86_64 %s' % (
        self.repocheckerdir,
        'openSUSE:Factory',
        'standard', self.repodir)
    os.system(civs)

    # Sort the groups from high to low. This also puts the staging
    # projects first.
    for id_, reqs in sorted(groups.items(), reverse=True):
        self._check_repo_group(id_, reqs, opts)