Refactor of check_one_request into checkrepo module.

This commit is contained in:
Alberto Planas 2014-06-12 18:25:45 +02:00
parent ac32dc9bbe
commit 550e98d6c2
5 changed files with 333 additions and 281 deletions

View File

@ -36,70 +36,6 @@ from osclib.memoize import CACHEDIR
DOWNLOADS = os.path.expanduser('~/co/downloads')
def get_project_repos(apiurl, src_project, tgt_project, src_package, rev):
    """Read the repositories of the project from _meta.

    Returns a list of (repository_name, [(arch, result), ...]) tuples
    built from the lastsuccess build result; on an HTTP error the
    problem is printed and whatever was collected so far is returned.
    """
    # XXX TODO - Shitty logic here. A better proposal is refactorize
    # _check_repo_buildsuccess.
    lastsuccess = '_result?lastsuccess&package=%s&pathproject=%s&srcmd5=%s' % (
        quote_plus(src_package),
        quote_plus(tgt_project),
        rev)
    url = makeurl(apiurl, ['build', src_project, lastsuccess])
    result = []
    try:
        root = ET.parse(http_GET(url)).getroot()
        for repo_node in root.findall('repository'):
            arch_states = [(arch_node.get('arch'), arch_node.get('result'))
                           for arch_node in repo_node.findall('arch')]
            result.append((repo_node.get('name'), arch_states))
    except urllib2.HTTPError as e:
        print('ERROR in URL %s [%s]' % (url, e))
    return result
def old_md5(apiurl, src_project, tgt_project, src_package, rev):
    """Recollect old MD5 for a package.

    Scans the repositories/architectures whose last status was good,
    takes the job history of the first one that yields results, and
    returns the set of link-target srcmd5 values for every historical
    srcmd5 found there.
    """
    # XXX TODO - instead of fixing the limit, use endtime to make
    # sure that we have the correct time frame.
    history_query = {
        'package': src_package,
        # 'code': 'succeeded',
        'limit': 20,
    }
    candidates = []
    for repository, archs in get_project_repos(apiurl, src_project,
                                               tgt_project, src_package, rev):
        for arch, status in archs:
            # One successful job history is enough.
            if candidates:
                break
            if status not in ('succeeded', 'outdated'):
                continue
            url = makeurl(apiurl,
                          ['build', src_project, repository, arch, '_jobhistory'],
                          query=history_query)
            try:
                root = ET.parse(http_GET(url)).getroot()
                candidates = [e.get('srcmd5') for e in root.findall('jobhist')]
            except urllib2.HTTPError as e:
                print('ERROR in URL %s [%s]' % (url, e))

    md5_set = set()
    for srcmd5 in candidates:
        url = makeurl(apiurl, ['source', src_project, src_package],
                      query={'expand': 1, 'rev': srcmd5})
        root = ET.parse(http_GET(url)).getroot()
        md5_set.add(root.find('linkinfo').get('srcmd5'))
    return md5_set
def _check_repo_find_submit_request(self, opts, project, package):
xpath = "(action/target/@project='%s' and "\
"action/target/@package='%s' and "\
@ -130,112 +66,8 @@ def _check_repo_avoid_wrong_friends(self, prj, repo, arch, pkg, opts):
return True
def _check_repo_one_request(self, rq, opts):
class CheckRepoPackage:
def __repr__(self):
return '[%d:%s/%s]' % (int(self.request), self.sproject, self.spackage)
def __init__(self):
self.updated = False
self.error = None
self.build_excluded = False
id_ = int(rq.get('id'))
actions = rq.findall('action')
if len(actions) > 1:
msg = 'Only one action per request is supported'
print('DECLINED', msg)
self.checkrepo.change_review_state(id_, 'declined', message=msg)
return []
act = actions[0]
type_ = act.get('type')
if type_ != 'submit':
msg = 'Unchecked request type %s' % type_
print 'ACCEPTED', msg
self.checkrepo.change_review_state(id_, 'accepted', message=msg)
return []
pkg = act.find('source').get('package')
prj = act.find('source').get('project')
rev = act.find('source').get('rev')
tprj = act.find('target').get('project')
tpkg = act.find('target').get('package')
subm_id = 'SUBMIT(%d):' % id_
print '%s %s/%s -> %s/%s' % (subm_id, prj, pkg, tprj, tpkg)
packs = []
p = CheckRepoPackage()
p.spackage = pkg
p.sproject = prj
p.tpackage = tpkg
p.tproject = tprj
p.group = opts.grouped.get(id_, id_)
p.request = id_
# Get source information about the SR:
# - Source MD5
# - Entries (.tar.gz, .changes, .spec ...) and MD5
try:
url = makeurl(opts.apiurl, ['source', prj, pkg, '?expand=1&rev=%s' % rev])
root = ET.parse(http_GET(url)).getroot()
except urllib2.HTTPError, e:
print 'ERROR in URL %s [%s]' % (url, e)
return []
p.rev = root.attrib['srcmd5']
# Recover the .spec files
specs = [en.attrib['name'][:-5] for en in root.findall('entry')
if en.attrib['name'].endswith('.spec')]
# source checker validated it exists
specs.remove(tpkg)
packs.append(p)
# Validate the rest of the spec files
for spec in specs:
lprj, lpkg, lmd5 = '', '', ''
try:
url = makeurl(opts.apiurl, ['source', prj, spec, '?expand=1'])
root = ET.parse(http_GET(url)).getroot()
link = root.find('linkinfo')
if link is not None:
lprj = link.attrib.get('project', '')
lpkg = link.attrib.get('package', '')
lmd5 = link.attrib['srcmd5']
except urllib2.HTTPError:
pass # leave lprj
if lprj != prj or lpkg != pkg and not p.updated:
msg = '%s/%s should _link to %s/%s' % (prj, spec, prj, pkg)
print 'DECLINED', msg
self.checkrepo.change_review_state(id_, 'declined', message=msg)
p.updated = True
if lmd5 != p.rev and not p.updated:
if lmd5 not in old_md5(opts.apiurl, lprj, p.tproject, spec, p.rev):
msg = '%s/%s is a link but has a different md5sum than %s?' % (prj, spec, pkg)
else:
msg = '%s is no longer the submitted version, please resubmit HEAD' % spec
print '[DECLINED] CHECK MANUALLY', msg
# self.checkrepo.change_review_state(id_, 'declined', message=msg)
p.updated = True
sp = CheckRepoPackage()
sp.spackage = spec
sp.sproject = prj
sp.tpackage = spec
sp.tproject = tprj
sp.group = p.group
sp.request = id_
packs.append(sp)
sp.rev = root.attrib['srcmd5']
return packs
def _check_repo_buildsuccess(self, p, opts):
root_xml = self.checkrepo.last_build_success(p.sproject, p.tproject, p.spackage, p.rev)
def _check_repo_buildsuccess(self, request, opts):
root_xml = self.checkrepo.last_build_success(request.src_project, request.tgt_project, request.src_package, request.srcmd5)
root = ET.fromstring(root_xml)
if not root:
return False
@ -244,7 +76,7 @@ def _check_repo_buildsuccess(self, p, opts):
return False
result = False
p.goodrepos = []
request.goodrepos = []
missings = {}
alldisabled = True
foundbuilding = None
@ -260,9 +92,9 @@ def _check_repo_buildsuccess(self, p, opts):
if not tocheckrepos:
msg = 'Missing i586 and x86_64 in the repo list'
print msg
self.checkrepo.change_review_state(p.request, 'new', message=msg)
self.checkrepo.change_review_state(request.request_id, 'new', message=msg)
# Next line not needed, but for documentation
p.updated = True
request.updated = True
return False
for repo in tocheckrepos:
@ -276,12 +108,12 @@ def _check_repo_buildsuccess(self, p, opts):
continue
if 'missing' in arch.attrib:
for pkg in arch.attrib['missing'].split(','):
if not self._check_repo_avoid_wrong_friends(p.sproject, repo.attrib['name'], arch.attrib['arch'], pkg, opts):
if not self._check_repo_avoid_wrong_friends(request.src_project, repo.attrib['name'], arch.attrib['arch'], pkg, opts):
missings[pkg] = 1
if not (arch.attrib['result'] in ['succeeded', 'excluded']):
if arch.attrib['result'] not in ('succeeded', 'excluded'):
isgood = False
if arch.attrib['result'] == 'excluded' and arch.attrib['arch'] == 'x86_64':
p.build_excluded = True
request.build_excluded = True
if arch.attrib['result'] == 'disabled':
founddisabled = True
if arch.attrib['result'] == 'failed' or arch.attrib['result'] == 'unknown':
@ -292,11 +124,11 @@ def _check_repo_buildsuccess(self, p, opts):
if arch.attrib['result'] == 'building':
r_foundbuilding = repo.attrib['name']
if arch.attrib['result'] == 'outdated':
msg = "%s's sources were changed after submissions and the old sources never built. Please resubmit" % p.spackage
msg = "%s's sources were changed after submissions and the old sources never built. Please resubmit" % request.src_package
print 'DECLINED', msg
self.checkrepo.change_review_state(p.request, 'declined', message=msg)
self.checkrepo.change_review_state(request.request_id, 'declined', message=msg)
# Next line is not needed, but for documentation
p.updated = True
request.updated = True
return False
r_missings = r_missings.keys()
@ -305,39 +137,39 @@ def _check_repo_buildsuccess(self, p, opts):
if not founddisabled:
alldisabled = False
if isgood:
p.goodrepos.append(repo.attrib['name'])
request.goodrepos.append(repo.attrib['name'])
result = True
if r_foundbuilding:
foundbuilding = r_foundbuilding
if r_foundfailed:
foundfailed = r_foundfailed
p.missings = sorted(missings)
request.missings = sorted(missings)
if result:
return True
if alldisabled:
msg = '%s is disabled or does not build against factory. Please fix and resubmit' % p.spackage
msg = '%s is disabled or does not build against factory. Please fix and resubmit' % request.src_package
print 'DECLINED', msg
self.checkrepo.change_review_state(p.request, 'declined', message=msg)
self.checkrepo.change_review_state(request.request_id, 'declined', message=msg)
# Next line not needed, but for documentation
p.updated = True
request.updated = True
return False
if foundbuilding:
msg = '%s is still building for repository %s' % (p.spackage, foundbuilding)
msg = '%s is still building for repository %s' % (request.src_package, foundbuilding)
print msg
self.checkrepo.change_review_state(p.request, 'new', message=msg)
self.checkrepo.change_review_state(request.request_id, 'new', message=msg)
# Next line not needed, but for documentation
p.updated = True
request.updated = True
return False
if foundfailed:
msg = '%s failed to build in repository %s - not accepting' % (p.spackage, foundfailed)
msg = '%s failed to build in repository %s - not accepting' % (request.src_package, foundfailed)
# failures might be temporary, so don't autoreject but wait for a human to check
print msg
self.checkrepo.change_review_state(p.request, 'new', message=msg)
self.checkrepo.change_review_state(request.request_id, 'new', message=msg)
# Next line not needed, but for documentation
p.updated = True
request.updated = True
return False
return True
@ -380,9 +212,9 @@ def _check_repo_get_binary(self, apiurl, prj, repo, arch, package, file, target,
get_binary_file(apiurl, prj, repo, arch, file, package=package, target_filename=target)
def _get_verifymd5(self, p, rev):
def _get_verifymd5(self, request, rev):
try:
url = makeurl(self.get_api_url(), ['source', p.sproject, p.spackage, '?view=info&rev=%s' % rev])
url = makeurl(self.get_api_url(), ['source', request.src_project, request.src_package, '?view=info&rev=%s' % rev])
root = ET.parse(http_GET(url)).getroot()
except urllib2.HTTPError, e:
print 'ERROR in URL %s [%s]' % (url, e)
@ -390,29 +222,29 @@ def _get_verifymd5(self, p, rev):
return root.attrib['verifymd5']
def _checker_compare_disturl(self, disturl, p):
def _checker_compare_disturl(self, disturl, request):
distmd5 = os.path.basename(disturl).split('-')[0]
if distmd5 == p.rev:
if distmd5 == request.srcmd5:
return True
vrev1 = self._get_verifymd5(p, p.rev)
vrev2 = self._get_verifymd5(p, distmd5)
vrev1 = self._get_verifymd5(request, request.srcmd5)
vrev2 = self._get_verifymd5(request, distmd5)
if vrev1 == vrev2:
return True
print 'ERROR Revision missmatch: %s, %s' % (vrev1, vrev2)
return False
def _check_repo_download(self, p, opts):
p.downloads = dict()
def _check_repo_download(self, request, opts):
request.downloads = dict()
if p.build_excluded:
if request.build_excluded:
return set()
for repo in p.goodrepos:
for repo in request.goodrepos:
# we can assume x86_64 is there
todownload = []
for fn in self._check_repo_repo_list(p.sproject, repo, 'x86_64', p.spackage, opts):
for fn in self._check_repo_repo_list(request.src_project, repo, 'x86_64', request.src_package, opts):
todownload.append(('x86_64', fn[0], fn[3]))
# now fetch -32bit packs
@ -420,31 +252,31 @@ def _check_repo_download(self, p, opts):
# if fn[2] == 'x86_64':
# todownload.append(('i586', fn[0], fn[3]))
p.downloads[repo] = []
request.downloads[repo] = []
for arch, fn, mt in todownload:
repodir = os.path.join(DOWNLOADS, p.spackage, repo)
repodir = os.path.join(DOWNLOADS, request.src_package, repo)
if not os.path.exists(repodir):
os.makedirs(repodir)
t = os.path.join(repodir, fn)
self._check_repo_get_binary(opts.apiurl, p.sproject, repo,
arch, p.spackage, fn, t, mt)
p.downloads[repo].append(t)
self._check_repo_get_binary(opts.apiurl, request.src_project, repo,
arch, request.src_package, fn, t, mt)
request.downloads[repo].append(t)
if fn.endswith('.rpm'):
pid = subprocess.Popen(['rpm', '--nosignature', '--queryformat', '%{DISTURL}', '-qp', t],
stdout=subprocess.PIPE, close_fds=True)
os.waitpid(pid.pid, 0)[1]
disturl = pid.stdout.readlines()[0]
if not self._checker_compare_disturl(disturl, p):
p.error = '[%s] %s does not match revision %s' % (p, disturl, p.rev)
if not self._checker_compare_disturl(disturl, request):
request.error = '[%s] %s does not match revision %s' % (request, disturl, request.srcmd5)
return set()
toignore = set()
for fn in self._check_repo_repo_list(p.tproject, 'standard', 'x86_64', p.tpackage, opts, ignore=True):
for fn in self._check_repo_repo_list(request.tgt_project, 'standard', 'x86_64', request.tgt_package, opts, ignore=True):
toignore.add(fn[1])
# now fetch -32bit pack list
for fn in self._check_repo_repo_list(p.tproject, 'standard', 'i586', p.tpackage, opts, ignore=True):
for fn in self._check_repo_repo_list(request.tgt_project, 'standard', 'i586', request.tgt_package, opts, ignore=True):
if fn[2] == 'x86_64':
toignore.add(fn[1])
return toignore
@ -457,82 +289,81 @@ def _get_buildinfo(self, opts, prj, repo, arch, pkg):
return [e.attrib['name'] for e in root.findall('bdep')]
def _check_repo_group(self, id_, reqs, opts):
print '\nCheck group', reqs
if not all(self._check_repo_buildsuccess(r, opts) for r in reqs):
def _check_repo_group(self, id_, requests, opts):
print '\nCheck group', requests
if not all(self._check_repo_buildsuccess(r, opts) for r in requests):
return
# all succeeded
toignore = set()
destdir = os.path.expanduser('~/co/%s' % str(reqs[0].group))
destdir = os.path.expanduser('~/co/%s' % str(requests[0].group))
fetched = dict((r, False) for r in opts.groups.get(id_, []))
packs = []
for p in reqs:
i = self._check_repo_download(p, opts)
if p.error:
if not p.updated:
print p.error
self.checkrepo.change_review_state(p.request, 'new', message=p.error)
p.updated = True
for request in requests:
i = self._check_repo_download(request, opts)
if request.error:
if not request.updated:
print request.error
self.checkrepo.change_review_state(request.request_id, 'new', message=request.error)
request.updated = True
else:
print p.error
print request.error
return
toignore.update(i)
fetched[p.request] = True
packs.append(p)
fetched[request.request_id] = True
packs.append(request)
for request_id, f in fetched.items():
if not f:
request = self.checkrepo.get_request(request_id)
packs.extend(self._check_repo_one_request(request, opts))
for p in packs:
if fetched[p.request]:
packs.extend(self.checkrepo.check_specs(request_id=request_id))
for rq in packs:
if fetched[rq.request_id]:
continue
# we need to call it to fetch the good repos to download
# but the return value is of no interest right now
self._check_repo_buildsuccess(p, opts)
i = self._check_repo_download(p, opts)
if p.error:
print 'ERROR (ALREADY ACEPTED?):', p.error
p.updated = True
self._check_repo_buildsuccess(rq, opts)
i = self._check_repo_download(rq, opts)
if rq.error:
print 'ERROR (ALREADY ACEPTED?):', rq.error
rq.updated = True
toignore.update(i)
# Detect cycles into the current Factory graph after we update the
# links with the current list of request.
cycle_detector = CycleDetector(opts.apiurl)
for (cycle, new_edges) in cycle_detector.cycles(packages=packs):
for (cycle, new_edges) in cycle_detector.cycles(requests=packs):
print
print 'New cycle detected:', sorted(cycle)
print 'New edges:', new_edges
# Mark all packages as updated, to avoid to be accepted
for p in reqs:
p.updated = True
for request in requests:
request.updated = True
for p in reqs:
for rq in requests:
smissing = []
for package in p.missings:
for package in rq.missings:
alreadyin = False
# print package, packs
for t in packs:
if package == t.tpackage:
if package == t.tgt_package:
alreadyin = True
if alreadyin:
continue
# print package, packs, downloads, toignore
request = self._check_repo_find_submit_request(opts, p.tproject, package)
request = self._check_repo_find_submit_request(opts, rq.tgt_project, package)
if request:
greqs = opts.groups.get(p.group, [])
greqs = opts.groups.get(rq.group, [])
if request in greqs:
continue
package = '%s(rq%s)' % (package, request)
smissing.append(package)
if len(smissing):
msg = 'Please make sure to wait before these depencencies are in %s: %s' % (p.tproject, ', '.join(smissing))
if not p.updated:
self.checkrepo.change_review_state(p.request, 'new', message=msg)
msg = 'Please make sure to wait before these depencencies are in %s: %s' % (rq.tgt_project, ', '.join(smissing))
if not rq.updated:
self.checkrepo.change_review_state(rq.request_id, 'new', message=msg)
print msg
p.updated = True
rq.updated = True
else:
print msg
return
@ -552,12 +383,12 @@ def _check_repo_group(self, id_, reqs, opts):
# TODO: for groups we just pick the first repo - we'd need to create a smart
# matrix
dirstolink = []
for p in packs:
keys = p.downloads.keys()
for rq in packs:
keys = rq.downloads.keys()
if not keys:
continue
r = keys[0]
dirstolink.append((p, r, p.downloads[r]))
dirstolink.append((rq, r, rq.downloads[r]))
reposets.append(dirstolink)
if len(reposets) == 0:
@ -568,8 +399,8 @@ def _check_repo_group(self, id_, reqs, opts):
if os.path.exists(destdir):
shutil.rmtree(destdir)
os.makedirs(destdir)
for p, repo, downloads in dirstolink:
dir = destdir + '/%s' % p.tpackage
for rq, repo, downloads in dirstolink:
dir = destdir + '/%s' % rq.tgt_package
for d in downloads:
if not os.path.exists(dir):
os.mkdir(dir)
@ -594,24 +425,24 @@ def _check_repo_group(self, id_, reqs, opts):
updated = {}
if ret:
# print stdoutdata, set(map(lambda x: x.request, reqs))
# print stdoutdata, set(map(lambda x: x.request_id, reqs))
for p in reqs:
if updated.get(p.request, False) or p.updated:
for rq in requests:
if updated.get(rq.request_id, False) or rq.updated:
continue
print stdoutdata
self.checkrepo.change_review_state(p.request, 'new', message=stdoutdata)
self.checkrepo.change_review_state(rq.request_id, 'new', message=stdoutdata)
p.updated = True
updated[p.request] = 1
updated[rq.request_id] = 1
return
for p in reqs:
if updated.get(p.request, False) or p.updated:
for rq in requests:
if updated.get(rq.request_id, False) or rq.updated:
continue
msg = 'Builds for repo %s' % p.goodrepo
msg = 'Builds for repo %s' % rq.goodrepo
print 'ACCEPTED', msg
self.checkrepo.change_review_state(p.request, 'accepted', message=msg)
p.updated = True
updated[p.request] = 1
self.checkrepo.change_review_state(rq.request_id, 'accepted', message=msg)
rq.updated = True
updated[rq.request_id] = 1
shutil.rmtree(destdir)
@ -649,25 +480,26 @@ def do_check_repo(self, subcmd, opts, *args):
ids = [arg for arg in args if arg.isdigit()]
packs = []
# Store requests' package information and .spec files: store all
# source containers involved.
requests = []
if not ids:
# Return a list, we flat here with .extend()
for request in self.checkrepo.pending_requests():
packs.extend(self._check_repo_one_request(request, opts))
requests.extend(self.checkrepo.check_specs(request=request))
else:
# We have a list, use them.
for request_id in ids:
request = self.checkrepo.get_request(request_id)
packs.extend(self._check_repo_one_request(request, opts))
requests.extend(self.checkrepo.check_specs(request_id=request_id))
# Order the packs before grouping
packs = sorted(packs, key=lambda p: p.request, reverse=True)
requests = sorted(requests, key=lambda p: p.request_id, reverse=True)
groups = {}
for p in packs:
a = groups.get(p.group, [])
a.append(p)
groups[p.group] = a
for request in requests:
a = groups.get(request.group, [])
a.append(request)
groups[request.group] = a
self.repocheckerdir = os.path.dirname(os.path.realpath(os.path.expanduser('~/.osc-plugins/osc-check_repo.py')))
self.repodir = "%s/repo-%s-%s-x86_64" % (CACHEDIR, 'openSUSE:Factory', 'standard')

View File

@ -25,6 +25,50 @@ from osclib.stagingapi import StagingAPI
from osclib.memoize import memoize
class Request(object):
    """Simple request container."""

    def __init__(self, request_id=None, src_project=None,
                 src_package=None, tgt_project=None, tgt_package=None,
                 revision=None, srcmd5=None, group=None, element=None):
        """Initialize the container; if `element` is a request XML node,
        the identifying fields are populated from it via load()."""
        self.request_id = request_id
        self.src_project = src_project
        self.src_package = src_package
        self.tgt_project = tgt_project
        self.tgt_package = tgt_package
        self.revision = revision
        self.srcmd5 = srcmd5
        self.group = group

        # Review bookkeeping flags, always reset for a fresh instance.
        self.updated = False
        self.error = None
        self.build_excluded = False

        if element:
            self.load(element)

    def load(self, element):
        """Load a node from a ElementTree request XML element."""
        self.request_id = int(element.get('id'))
        action = element.find('action')
        source = action.find('source')
        target = action.find('target')
        self.src_project = source.get('project')
        self.src_package = source.get('package')
        self.revision = source.get('rev')
        self.tgt_project = target.get('project')
        self.tgt_package = target.get('package')
        # The groups are in the CheckRepo object.
        self.group = self.request_id

    def __repr__(self):
        return 'SUBMIT(%s) %s/%s -> %s/%s' % (self.request_id,
                                              self.src_project,
                                              self.src_package,
                                              self.tgt_project,
                                              self.tgt_package)
class CheckRepo(object):
def __init__(self, apiurl):
@ -39,9 +83,9 @@ class CheckRepo(object):
self._staging()
def _staging(self):
"""
Preload the groups of related request associated by the same
"""Preload the groups of related request associated by the same
staging project.
"""
for project in self.staging.get_staging_projects():
# Get all the requests identifier for the project
@ -125,3 +169,174 @@ class CheckRepo(object):
except urllib2.HTTPError, e:
print('ERROR in URL %s [%s]' % (url, e))
return xml
def get_project_repos(self, src_project, tgt_project, src_package, rev):
    """Read the repositories of the project from _meta.

    Returns a list of (repository_name, [(arch, result), ...]) pairs
    built from the lastsuccess build result of `src_project`; on an
    HTTP error the problem is printed and whatever was collected so
    far (possibly an empty list) is returned.
    """
    # XXX TODO - Shitty logic here. A better proposal is refactorize
    # _check_repo_buildsuccess.
    repos = []
    url = makeurl(self.apiurl,
                  ('build', src_project,
                   '_result?lastsuccess&package=%s&pathproject=%s&srcmd5=%s' % (
                       quote_plus(src_package),
                       quote_plus(tgt_project),
                       rev)))
    try:
        root = ET.parse(http_GET(url)).getroot()
        for element in root.findall('repository'):
            # One (arch, result) pair per architecture of this repository.
            archs = [(e.get('arch'), e.get('result')) for e in element.findall('arch')]
            repos.append((element.get('name'), archs))
    except urllib2.HTTPError, e:
        print('ERROR in URL %s [%s]' % (url, e))
    return repos
def old_md5(self, src_project, tgt_project, src_package, rev):
    """Recollect old MD5 for a package.

    Walks the repositories/architectures reported by
    get_project_repos() and, for the first arch whose last status was
    'succeeded' or 'outdated', reads the _jobhistory to collect the
    historical srcmd5 values.  Each of those revisions is expanded and
    the srcmd5 of its link target accumulated into the returned set.
    """
    # XXX TODO - instead of fixing the limit, use endtime to make
    # sure that we have the correct time frame.
    limit = 20
    query = {
        'package': src_package,
        # 'code': 'succeeded',
        'limit': limit,
    }
    repositories = self.get_project_repos(src_project,
                                          tgt_project,
                                          src_package, rev)
    srcmd5_list = []
    for repository, archs in repositories:
        for arch, status in archs:
            # One job history with results is enough; stop scanning.
            if srcmd5_list:
                break
            if status not in ('succeeded', 'outdated'):
                continue
            url = makeurl(self.apiurl, ('build', src_project,
                                        repository, arch,
                                        '_jobhistory'),
                          query=query)
            try:
                root = ET.parse(http_GET(url)).getroot()
                srcmd5_list = [e.get('srcmd5') for e in root.findall('jobhist')]
            except urllib2.HTTPError, e:
                print('ERROR in URL %s [%s]' % (url, e))
    md5_set = set()
    for srcmd5 in srcmd5_list:
        query = {
            'expand': 1,
            'rev': srcmd5,
        }
        url = makeurl(self.apiurl, ('source', src_project, src_package), query=query)
        root = ET.parse(http_GET(url)).getroot()
        md5_set.add(root.find('linkinfo').get('srcmd5'))
    return md5_set
def check_specs(self, request_id=None, request=None):
    """Check a single request and load the different SPECs files.

    This method have side effects, it can ACCEPT or DECLINE
    requests after some checks.

    Exactly one of `request_id` (numeric id) or `request` (request XML
    element) must be provided.  Returns a list of Request objects: one
    for the submitted package plus one per extra .spec file found in
    the source container; the list is empty when the request was
    declined or auto-accepted here.
    """
    requests = []

    if request_id:
        request = self.get_request(request_id)
    elif request:
        request_id = int(request.get('id'))
    else:
        raise Exception('Please, provide a request_id or a request XML object.')

    # Check that only one action is allowed in the request.
    actions = request.findall('action')
    if len(actions) > 1:
        msg = 'Only one action per request is supported'
        print('DECLINED', msg)
        self.change_review_state(request_id, 'declined', message=msg)
        return requests

    # Accept requests that are not SUBMIT type.
    # XXX TODO - DELETE requests need to be managed here too.
    action = actions[0]
    action_type = action.get('type')
    if action_type != 'submit':
        msg = 'Unchecked request type %s' % action_type
        print 'ACCEPTED', msg
        self.change_review_state(request_id, 'accepted', message=msg)
        return requests

    rq = Request(element=request)
    print rq
    # The staging-project grouping wins over the per-request default.
    rq.group = self.grouped.get(request_id, request_id)
    requests.append(rq)

    # Get source information about the SR:
    # - Source MD5
    # - Entries (.tar.gz, .changes, .spec ...) and MD5
    try:
        url = makeurl(self.apiurl, ['source', rq.src_project, rq.src_package],
                      {'rev': rq.revision, 'expand': 1})
        root = ET.parse(http_GET(url)).getroot()
    except urllib2.HTTPError, e:
        print 'ERROR in URL %s [%s]' % (url, e)
        return requests
    rq.srcmd5 = root.attrib['srcmd5']

    # Recover the .spec files
    specs = [en.attrib['name'][:-5] for en in root.findall('entry')
             if en.attrib['name'].endswith('.spec')]

    # source checker validated it exists
    specs.remove(rq.src_package)

    # Makes sure that the .spec file builds properly.
    #
    # In OBS the source container is the place where all the .spec
    # files and .tgz files are stored, and used to build a binary
    # package (.RPM) and a source package (.SRC.RPM)
    #
    # There are some rules in OBS here that we need to know:
    #
    # - There must be a .spec file that have the same name that
    #   the source container. For example, if the source
    #   container is python3-Pillow, we need a
    #   python3-Pillow.spec file.
    #
    # - If there are more .spec files, in case that we want to
    #   build more packages, this is represented as a new source
    #   container in OBS, that is a link to the original one but
    #   with the name of the .spec file.
    for spec in specs:
        spec_info = self.staging.get_package_information(rq.src_project, spec)

        if (spec_info['project'] != rq.src_project
           or spec_info['package'] != rq.src_package) and not rq.updated:
            msg = '%s/%s should _link to %s/%s' % (rq.src_project, spec, rq.src_project, rq.src_package)
            print 'DECLINED', msg
            self.change_review_state(rq.request_id, 'declined', message=msg)
            rq.updated = True

        if spec_info['srcmd5'] != rq.srcmd5 and not rq.updated:
            if spec_info['srcmd5'] not in self.old_md5(rq.src_project, rq.tgt_project, spec, rq.srcmd5):
                msg = '%s/%s is a link but has a different md5sum than %s?' % (rq.src_project, spec, rq.src_package)
            else:
                msg = '%s is no longer the submitted version, please resubmit HEAD' % spec
            print '[DECLINED] CHECK MANUALLY', msg
            # self.checkrepo.change_review_state(id_, 'declined', message=msg)
            rq.updated = True

        # One extra Request per secondary .spec / linked container.
        sp = Request(request_id=rq.request_id,
                     src_project=rq.src_project, src_package=spec,
                     tgt_project=rq.tgt_project, tgt_package=spec,
                     revision=None, srcmd5=spec_info['dir_srcmd5'],
                     group=rq.group)
        requests.append(sp)

    return requests

View File

@ -228,7 +228,7 @@ class CycleDetector(object):
return frozenset(frozenset(e.text for e in cycle.findall('package'))
for cycle in root.findall('cycle'))
def cycles(self, packages, project='openSUSE:Factory', repository='standard', arch='x86_64'):
def cycles(self, requests, project='openSUSE:Factory', repository='standard', arch='x86_64'):
"""Detect cycles in a specific repository."""
# Detect cycles - We create the full graph from _builddepinfo.
@ -244,11 +244,11 @@ class CycleDetector(object):
# can't be found in x86_64 architecture.
#
# The first filter is to remove some packages that do not have
# `goodrepos`. Those packages are usually marked as 'p.update
# `goodrepos`. Those packages are usually marked as 'rq.update
# = True' (meaning that they are declined or there is a new
# updated review).
all_packages = [self._get_builddepinfo(p.sproject, p.goodrepos[0], arch, p.spackage)
for p in packages if not p.updated]
all_packages = [self._get_builddepinfo(rq.src_project, rq.goodrepos[0], arch, rq.src_package)
for rq in requests if not rq.updated]
all_packages = [pkg for pkg in all_packages if pkg]
subpkgs.update(dict((p, pkg.pkg) for pkg in all_packages for p in pkg.subs))

View File

@ -130,11 +130,15 @@ class StagingAPI(object):
url = self.makeurl(['source', project, pkgname])
content = http_GET(url)
root = ET.parse(content).getroot().find('linkinfo')
package_info['srcmd5'] = root.attrib['srcmd5']
package_info['rev'] = root.attrib['rev']
package_info['project'] = root.attrib['project']
package_info['package'] = root.attrib['package']
root = ET.parse(content).getroot()
package_info['dir_srcmd5'] = root.attrib['srcmd5']
linkinfo = root.find('linkinfo')
package_info['srcmd5'] = linkinfo.attrib['srcmd5']
package_info['rev'] = linkinfo.attrib.get('rev', None)
package_info['project'] = linkinfo.attrib['project']
package_info['package'] = linkinfo.attrib['package']
return package_info
def move_between_project(self, source_project, req_id,

View File

@ -114,6 +114,7 @@ class TestApiCalls(unittest.TestCase):
"""
package_info = {
'dir_srcmd5': '751efeae52d6c99de48164088a33d855',
'project': 'home:Admin',
'rev': '7b98ac01b8071d63a402fa99dc79331c',
'srcmd5': '7b98ac01b8071d63a402fa99dc79331c',