2013-03-21 11:34:18 +01:00
#
# (C) 2011 coolo@suse.de, Novell Inc, openSUSE.org
# Distribute under GPLv2 or GPLv3
#
# Copy this script to ~/.osc-plugins/ or /var/lib/osc-plugins .
2013-06-20 17:30:54 +02:00
# Then try to run 'osc check_repo --help' to see the usage.
2013-03-21 11:34:18 +01:00
2013-07-24 15:34:08 +02:00
import cPickle
from datetime import datetime
from functools import wraps
2013-03-21 11:34:18 +01:00
import os
2013-07-24 16:54:55 +02:00
import shelve
2013-06-20 17:30:54 +02:00
import re
2013-03-21 11:34:18 +01:00
import subprocess
2013-06-20 17:30:54 +02:00
import shutil
from urllib import quote_plus
import urllib2
from xml . etree import cElementTree as ET
2013-07-24 16:54:55 +02:00
from osc import oscerr
from osc import cmdln
from osc . core import get_binary_file
from osc . core import get_buildinfo
from osc . core import http_GET
from osc . core import http_POST
from osc . core import makeurl
from osc . core import Request
#
# Ugly hack -- because of the way that osc imports plugins, we need to
# declare some functions and objects used in the decorator as global
#
global cPickle
global datetime
global shelve
global wraps
2013-06-20 17:30:54 +02:00
2013-07-24 16:58:36 +02:00
global memoize
global last_build_success
2013-06-20 17:30:54 +02:00
2013-07-24 15:34:08 +02:00
def memoize ( f ) :
""" Decorator function to implement a persistent cache.
>> > @memoize
. . . def test_func ( a ) :
. . . return a
Internally , the memoized function has a cache :
>> > cache = [ c . cell_contents for c in test_func . func_closure if ' sync ' in dir ( c . cell_contents ) ] [ 0 ]
>> > ' sync ' in dir ( cache )
True
There is a limit of the size of the cache
>> > for k in cache :
. . . del cache [ k ]
>> > len ( cache )
0
>> > for i in range ( 4095 ) :
. . . test_func ( i )
. . . len ( cache )
4095
>> > test_func ( 0 )
0
>> > len ( cache )
4095
>> > test_func ( 4095 )
4095
>> > len ( cache )
3072
>> > test_func ( 0 )
0
>> > len ( cache )
3073
>> > from datetime import timedelta
>> > k = [ k for k in cache if cPickle . loads ( k ) == ( ( 0 , ) , { } ) ] [ 0 ]
>> > t , v = cache [ k ]
>> > t = t - timedelta ( days = 10 )
>> > cache [ k ] = ( t , v )
>> > test_func ( 0 )
0
>> > t2 , v = cache [ k ]
>> > t != t2
True
"""
# Configuration variables
TMPDIR = ' /tmp ' # Where the cache files are stored
SLOTS = 4096 # Number of slots in the cache file
NCLEAN = 1024 # Number of slots to remove when limit reached
2013-07-25 13:38:11 +02:00
TIMEOUT = 60 * 60 * 2 # Time to live for every cache slot (seconds)
2013-07-24 15:34:08 +02:00
def _clean_cache ( ) :
len_cache = len ( cache )
if len_cache > = SLOTS :
nclean = NCLEAN + len_cache - SLOTS
keys_to_delete = sorted ( cache , key = lambda k : cache [ k ] [ 0 ] ) [ : nclean ]
for key in keys_to_delete :
del cache [ key ]
@wraps ( f )
def _f ( * args , * * kwargs ) :
now = datetime . now ( )
key = cPickle . dumps ( ( args , kwargs ) , protocol = - 1 )
updated = False
if key in cache :
timestamp , value = cache [ key ]
updated = True if ( now - timestamp ) . total_seconds ( ) < TIMEOUT else False
if not updated :
value = f ( * args , * * kwargs )
cache [ key ] = ( now , value )
_clean_cache ( )
return value
cache_name = os . path . join ( TMPDIR , f . __name__ )
cache = shelve . open ( cache_name , protocol = - 1 )
return _f
2013-06-20 17:30:54 +02:00
def _check_repo_change_review_state ( self , opts , id_ , newstate , message = ' ' , supersed = None ) :
2013-07-22 14:36:15 +02:00
""" Taken from osc/osc/core.py, improved:
- verbose option added ,
- empty by_user = & removed .
- numeric id can be int ( ) .
2013-03-21 11:34:18 +01:00
"""
2013-06-20 17:30:54 +02:00
query = {
' cmd ' : ' changereviewstate ' ,
' newstate ' : newstate ,
' by_user ' : ' factory-repo-checker ' ,
}
if supersed :
query [ ' superseded_by ' ] = supersed
# if message:
# query['comment'] = message
code = 404
u = makeurl ( opts . apiurl , [ ' request ' , str ( id_ ) ] , query = query )
2013-04-30 11:49:26 +02:00
try :
2013-06-20 17:30:54 +02:00
f = http_POST ( u , data = message )
root = ET . parse ( f ) . getroot ( )
code = root . attrib [ ' code ' ]
except urllib2 . HTTPError , e :
print ' ERROR in URL %s [ %s ] ' % ( u , e )
return code
2013-03-21 11:34:18 +01:00
def _check_repo_find_submit_request ( self , opts , project , package ) :
2013-04-16 22:41:52 +02:00
xpath = " (action/target/@project= ' %s ' and action/target/@package= ' %s ' and action/@type= ' submit ' and (state/@name= ' new ' or state/@name= ' review ' or state/@name= ' accepted ' )) " % ( project , package )
2013-04-17 20:43:50 +02:00
try :
url = makeurl ( opts . apiurl , [ ' search ' , ' request ' ] , ' match= %s ' % quote_plus ( xpath ) )
f = http_GET ( url )
collection = ET . parse ( f ) . getroot ( )
2013-07-22 17:19:13 +02:00
except urllib2 . HTTPError , e :
print ' ERROR in URL %s [ %s ] ' % ( url , e )
2013-04-17 20:43:50 +02:00
return None
2013-04-16 22:41:52 +02:00
for root in collection . findall ( ' request ' ) :
r = Request ( )
r . read ( root )
2013-04-18 13:33:25 +02:00
return int ( r . reqid )
2013-04-16 22:41:52 +02:00
return None
2013-06-20 17:30:54 +02:00
2013-03-21 11:34:18 +01:00
2013-04-18 13:33:25 +02:00
def _check_repo_fetch_group(self, opts, group):
    """Populate opts.groups / opts.grouped with the members of `group`.

    No-op when the group was already fetched.
    """
    if group in opts.groups:
        return
    url = makeurl(opts.apiurl, ['request', str(group)])
    root = ET.parse(http_GET(url)).getroot()
    # opts.groups[group] lists every request id grouped under it.
    members = []
    for req in root.find('action').findall('grouped'):
        members.append(int(req.attrib['id']))
    opts.groups[group] = members
    # opts.grouped maps each member id back to its group id.
    for id_ in members:
        opts.grouped[id_] = group
2013-04-18 13:33:25 +02:00
2013-06-20 17:30:54 +02:00
2013-04-18 13:33:25 +02:00
def _check_repo_avoid_wrong_friends ( self , prj , repo , arch , pkg , opts ) :
try :
url = makeurl ( opts . apiurl , [ " build " , prj , repo , arch , pkg ] )
root = ET . parse ( http_GET ( url ) ) . getroot ( )
2013-07-24 16:54:55 +02:00
except urllib2 . HTTPError , e :
2013-07-22 17:19:13 +02:00
print ' ERROR in URL %s [ %s ] ' % ( url , e )
2013-04-18 13:33:25 +02:00
return False
for binary in root . findall ( ' binary ' ) :
# if there are binaries, we're out
return False
return True
2013-06-20 17:30:54 +02:00
2013-04-16 23:10:04 +02:00
def _check_repo_one_request ( self , rq , opts ) :
2013-04-16 22:41:52 +02:00
class CheckRepoPackage :
def __repr__ ( self ) :
2013-06-20 17:30:54 +02:00
return ' [ %d : %s / %s ] ' % ( int ( self . request ) , self . sproject , self . spackage )
2013-04-18 13:33:25 +02:00
def __init__ ( self ) :
self . updated = False
2013-04-30 11:49:26 +02:00
self . error = None
2013-05-13 12:48:21 +02:00
self . build_excluded = False
2013-04-16 22:41:52 +02:00
2013-07-22 14:29:40 +02:00
id_ = int ( rq . get ( ' id ' ) )
2013-03-21 11:34:18 +01:00
actions = rq . findall ( ' action ' )
2013-07-22 14:29:40 +02:00
if len ( actions ) > 1 :
msg = ' only one action per request is supported - create a group instead: ' \
' https://github.com/SUSE/hackweek/wiki/Improved-Factory-devel-project-submission-workflow '
2013-07-22 17:19:13 +02:00
print ' DECLINED ' , msg
2013-07-22 14:29:40 +02:00
self . _check_repo_change_review_state ( opts , id_ , ' declined ' , message = msg )
2013-04-16 22:41:52 +02:00
return [ ]
2013-03-21 11:34:18 +01:00
2013-04-16 20:31:12 +02:00
act = actions [ 0 ]
2013-07-22 14:29:40 +02:00
type_ = act . get ( ' type ' )
if type_ != ' submit ' :
self . _check_repo_change_review_state ( opts , id_ , ' accepted ' ,
message = ' Unchecked request type %s ' % type_ )
2013-04-16 22:41:52 +02:00
return [ ]
2013-03-21 11:34:18 +01:00
2013-04-16 20:31:12 +02:00
pkg = act . find ( ' source ' ) . get ( ' package ' )
prj = act . find ( ' source ' ) . get ( ' project ' )
rev = act . find ( ' source ' ) . get ( ' rev ' )
tprj = act . find ( ' target ' ) . get ( ' project ' )
tpkg = act . find ( ' target ' ) . get ( ' package ' )
2013-03-21 11:34:18 +01:00
2013-07-22 14:29:40 +02:00
subm_id = ' SUBMIT( %d ): ' % id_
print ' %s %s / %s -> %s / %s ' % ( subm_id ,
2013-04-17 20:43:50 +02:00
prj , pkg ,
tprj , tpkg )
2013-04-16 22:41:52 +02:00
2013-07-22 14:29:40 +02:00
group = id_
2013-04-16 22:41:52 +02:00
try :
2013-07-22 16:15:32 +02:00
if id_ in opts . grouped :
2013-07-22 14:29:40 +02:00
group = opts . grouped [ id_ ]
2013-04-30 11:49:26 +02:00
else :
2013-07-25 13:38:11 +02:00
# Search in which group this id_ is included. The result
# in an XML document pointing to a single submit request
# ID if this id_ is actually part of a group
2013-07-22 16:15:32 +02:00
url = makeurl ( opts . apiurl , [ ' search ' , ' request ' , ' id?match=action/grouped/@id= %s ' % id_ ] )
2013-04-30 11:49:26 +02:00
root = ET . parse ( http_GET ( url ) ) . getroot ( )
2013-07-22 16:15:32 +02:00
reqs = root . findall ( ' request ' )
if reqs :
group = int ( reqs [ 0 ] . attrib [ ' id ' ] )
2013-07-25 13:38:11 +02:00
# Recover the full group description, with more SRIDs
# and populate opts.group and opts.grouped
2013-04-30 11:49:26 +02:00
self . _check_repo_fetch_group ( opts , group )
2013-07-25 13:38:11 +02:00
except urllib2 . HTTPError , e :
print ' ERROR in URL %s [ %s ] ' % ( url , e )
return [ ]
2013-04-16 22:41:52 +02:00
packs = [ ]
p = CheckRepoPackage ( )
p . spackage = pkg
p . sproject = prj
p . tpackage = tpkg
p . tproject = tprj
p . group = group
2013-07-22 14:29:40 +02:00
p . request = id_
2013-07-25 13:38:11 +02:00
# Get source information about the SR:
# - Source MD5
# - Entries (.tar.gz, .changes, .spec ...) and MD5
2013-04-16 20:31:12 +02:00
try :
2013-07-22 16:15:32 +02:00
url = makeurl ( opts . apiurl , [ ' source ' , prj , pkg , ' ?expand=1&rev= %s ' % rev ] )
2013-04-16 20:31:12 +02:00
root = ET . parse ( http_GET ( url ) ) . getroot ( )
2013-07-22 17:19:13 +02:00
except urllib2 . HTTPError , e :
print ' ERROR in URL %s [ %s ] ' % ( url , e )
2013-04-16 22:41:52 +02:00
return [ ]
p . rev = root . attrib [ ' srcmd5 ' ]
2013-07-25 13:38:11 +02:00
# Recover the .spec files
specs = [ e . attrib [ ' name ' ] [ : - 5 ] for e in root . findall ( ' entry ' ) if e . attrib [ ' name ' ] . endswith ( ' .spec ' ) ]
2013-04-16 22:41:52 +02:00
# source checker validated it exists
specs . remove ( tpkg )
packs . append ( p )
2013-07-25 13:38:11 +02:00
# Validate the rest of the spec files
2013-04-16 22:41:52 +02:00
for spec in specs :
2013-07-22 16:15:32 +02:00
lprj , lpkg , lmd5 = ' ' , ' ' , ' '
2013-04-16 22:41:52 +02:00
try :
2013-07-22 16:15:32 +02:00
url = makeurl ( opts . apiurl , [ ' source ' , prj , spec , ' ?expand=1 ' ] )
2013-04-16 22:41:52 +02:00
root = ET . parse ( http_GET ( url ) ) . getroot ( )
link = root . find ( ' linkinfo ' )
2013-05-13 12:48:21 +02:00
if link != None :
lprj = link . attrib . get ( ' project ' , ' ' )
lpkg = link . attrib . get ( ' package ' , ' ' )
lmd5 = link . attrib [ ' srcmd5 ' ]
2013-04-16 22:41:52 +02:00
except urllib2 . HTTPError :
pass # leave lprj
2013-07-22 16:15:32 +02:00
2013-05-13 12:48:21 +02:00
if lprj != prj or lpkg != pkg and not p . updated :
2013-07-22 16:24:17 +02:00
msg = ' %s / %s should _link to %s / %s ' % ( prj , spec , prj , pkg )
2013-07-22 14:29:40 +02:00
self . _check_repo_change_review_state ( opts , id_ , ' declined ' , message = msg )
2013-06-17 12:19:38 +02:00
print msg
2013-05-13 12:48:21 +02:00
p . updated = True
if lmd5 != p . rev and not p . updated :
2013-07-22 16:24:17 +02:00
msg = ' %s / %s is a link but has a different md5sum than %s ? ' % ( prj , spec , pkg )
2013-07-22 14:29:40 +02:00
self . _check_repo_change_review_state ( opts , id_ , ' new ' , message = msg )
2013-06-17 12:19:38 +02:00
print msg
2013-05-13 12:48:21 +02:00
p . updated = True
2013-04-16 22:41:52 +02:00
sp = CheckRepoPackage ( )
sp . spackage = spec
sp . sproject = prj
sp . tpackage = spec
sp . tproject = tprj
sp . group = p . group
2013-07-22 14:29:40 +02:00
sp . request = id_
2013-04-16 22:41:52 +02:00
packs . append ( sp )
sp . rev = root . attrib [ ' srcmd5 ' ]
return packs
2013-06-20 17:30:54 +02:00
2013-07-24 16:58:36 +02:00
@memoize
2013-07-24 15:34:08 +02:00
def last_build_success ( apiurl , src_project , tgt_project , src_package , rev ) :
root = None
2013-04-16 22:41:52 +02:00
try :
2013-07-24 15:34:08 +02:00
url = makeurl ( apiurl ,
[ ' build ' , src_project ,
' _result?lastsuccess&package= %s &pathproject= %s &srcmd5= %s ' % ( quote_plus ( src_package ) ,
quote_plus ( tgt_project ) ,
rev ) ] )
2013-07-24 16:54:55 +02:00
root = http_GET ( url ) . read ( )
2013-07-22 17:19:13 +02:00
except urllib2 . HTTPError , e :
print ' ERROR in URL %s [ %s ] ' % ( url , e )
2013-07-24 15:34:08 +02:00
return root
def _check_repo_buildsuccess ( self , p , opts ) :
2013-07-24 16:54:55 +02:00
root_xml = last_build_success ( opts . apiurl , p . sproject , p . tproject , p . spackage , p . rev )
root = ET . fromstring ( root_xml )
2013-07-24 15:34:08 +02:00
if not root :
2013-04-16 22:41:52 +02:00
return False
2013-07-22 17:19:13 +02:00
if ' code ' in root . attrib :
2013-04-16 20:31:12 +02:00
print ET . tostring ( root )
2013-04-16 22:41:52 +02:00
return False
2013-07-22 17:19:13 +02:00
2013-04-16 20:31:12 +02:00
result = False
2013-04-16 23:10:04 +02:00
p . goodrepo = None
2013-04-16 20:31:12 +02:00
missings = { }
alldisabled = True
foundbuilding = None
foundfailed = None
2013-05-13 12:48:21 +02:00
tocheckrepos = [ ]
2013-04-16 20:31:12 +02:00
for repo in root . findall ( ' repository ' ) :
2013-07-22 17:19:13 +02:00
archs = [ a . attrib [ ' arch ' ] for a in repo . findall ( ' arch ' ) ]
foundarchs = len ( [ a for a in archs if a in ( ' i586 ' , ' x86_64 ' ) ] )
2013-05-13 12:48:21 +02:00
if foundarchs == 2 :
tocheckrepos . append ( repo )
2013-07-22 17:19:13 +02:00
if not tocheckrepos :
msg = ' Missing i586 and x86_64 in the repo list '
2013-05-13 12:48:21 +02:00
self . _check_repo_change_review_state ( opts , p . request , ' new ' , message = msg )
2013-07-22 17:19:13 +02:00
print ' UPDATED ' , msg
2013-05-13 12:48:21 +02:00
return False
2013-07-22 17:19:13 +02:00
2013-05-13 12:48:21 +02:00
for repo in tocheckrepos :
isgood = True
founddisabled = False
r_foundbuilding = None
r_foundfailed = None
r_missings = { }
for arch in repo . findall ( ' arch ' ) :
2013-07-22 17:19:13 +02:00
if arch . attrib [ ' arch ' ] not in ( ' i586 ' , ' x86_64 ' ) :
2013-05-13 12:48:21 +02:00
continue
2013-07-22 17:19:13 +02:00
if ' missing ' in arch . attrib :
2013-05-13 12:48:21 +02:00
for pkg in arch . attrib [ ' missing ' ] . split ( ' , ' ) :
if not self . _check_repo_avoid_wrong_friends ( p . sproject , repo . attrib [ ' name ' ] , arch . attrib [ ' arch ' ] , pkg , opts ) :
missings [ pkg ] = 1
if not ( arch . attrib [ ' result ' ] in [ ' succeeded ' , ' excluded ' ] ) :
isgood = False
if arch . attrib [ ' result ' ] == ' excluded ' and arch . attrib [ ' arch ' ] == ' x86_64 ' :
p . build_excluded = True
if arch . attrib [ ' result ' ] == ' disabled ' :
founddisabled = True
if arch . attrib [ ' result ' ] == ' failed ' :
r_foundfailed = repo . attrib [ ' name ' ]
if arch . attrib [ ' result ' ] == ' building ' :
r_foundbuilding = repo . attrib [ ' name ' ]
if arch . attrib [ ' result ' ] == ' outdated ' :
msg = " %s ' s sources were changed after submissions and the old sources never built. Please resubmit " % p . spackage
2013-07-22 17:19:13 +02:00
print ' DECLINED ' , msg
2013-05-13 12:48:21 +02:00
self . _check_repo_change_review_state ( opts , p . request , ' new ' , message = msg )
return False
r_missings = r_missings . keys ( )
2013-04-18 13:33:25 +02:00
for pkg in r_missings :
missings [ pkg ] = 1
2013-04-16 20:31:12 +02:00
if not founddisabled :
alldisabled = False
if isgood :
2013-04-30 11:49:26 +02:00
p . goodrepo = repo . attrib [ ' name ' ]
result = True
2013-04-16 20:31:12 +02:00
if r_foundbuilding :
foundbuilding = r_foundbuilding
if r_foundfailed :
foundfailed = r_foundfailed
2013-07-25 13:38:11 +02:00
p . missings = sorted ( missings )
2013-04-30 11:49:26 +02:00
2013-05-13 12:48:21 +02:00
if result :
2013-04-16 22:41:52 +02:00
return True
if alldisabled :
2013-04-22 09:40:05 +02:00
msg = " %s is disabled or does not build against factory. Please fix and resubmit " % p . spackage
2013-07-22 17:19:13 +02:00
print ' DECLINED ' , msg
2013-04-16 22:41:52 +02:00
self . _check_repo_change_review_state ( opts , p . request , ' declined ' , message = msg )
return False
if foundbuilding :
2013-04-22 09:40:05 +02:00
msg = " {1} is still building for repository {0} " . format ( foundbuilding , p . spackage )
2013-04-16 22:41:52 +02:00
self . _check_repo_change_review_state ( opts , p . request , ' new ' , message = msg )
2013-07-22 17:19:13 +02:00
print ' UPDATED ' , msg
2013-04-16 22:41:52 +02:00
return False
if foundfailed :
2013-04-22 09:40:05 +02:00
msg = " {1} failed to build in repository {0} - not accepting " . format ( foundfailed , p . spackage )
2013-04-16 22:41:52 +02:00
self . _check_repo_change_review_state ( opts , p . request , ' new ' , message = msg )
2013-07-24 15:34:08 +02:00
print ' UPDATED ' , msg
2013-04-16 22:41:52 +02:00
return False
2013-03-21 11:34:18 +01:00
2013-04-16 22:41:52 +02:00
return True
2013-03-21 11:34:18 +01:00
2013-06-20 17:30:54 +02:00
2013-07-25 14:18:28 +02:00
def _check_repo_repo_list ( self , prj , repo , arch , pkg , opts , ignore = False ) :
2013-04-30 11:49:26 +02:00
url = makeurl ( opts . apiurl , [ ' build ' , prj , repo , arch , pkg ] )
files = [ ]
try :
2013-07-25 14:18:28 +02:00
binaries = ET . parse ( http_GET ( url ) ) . getroot ( )
2013-07-25 14:20:52 +02:00
for bin_ in binaries . findall ( ' binary ' ) :
fn = bin_ . attrib [ ' filename ' ]
2013-04-30 11:49:26 +02:00
result = re . match ( " (.*)-([^-]*)-([^-]*) \ .([^- \ .]+) \ .rpm " , fn )
if not result :
if fn == ' rpmlint.log ' :
files . append ( ( fn , ' ' , ' ' ) )
continue
2013-07-25 14:20:52 +02:00
pname = result . group ( 1 )
2013-04-30 11:49:26 +02:00
if pname . endswith ( ' -debuginfo ' ) or pname . endswith ( ' -debuginfo-32bit ' ) :
continue
if pname . endswith ( ' -debugsource ' ) :
continue
if result . group ( 4 ) == ' src ' :
continue
files . append ( ( fn , pname , result . group ( 4 ) ) )
2013-07-22 17:19:13 +02:00
except urllib2 . HTTPError , e :
2013-07-25 14:18:28 +02:00
if not ignore :
print ' ERROR in URL %s [ %s ] ' % ( url , e )
2013-04-30 11:49:26 +02:00
return files
2013-06-20 17:30:54 +02:00
2013-04-30 11:49:26 +02:00
def _check_repo_get_binary(self, apiurl, prj, repo, arch, package, file, target):
    """Download one binary file unless the target already exists."""
    if not os.path.exists(target):
        get_binary_file(apiurl, prj, repo, arch, file, package=package, target_filename=target)
2013-06-20 17:30:54 +02:00
2013-04-30 11:49:26 +02:00
def _check_repo_download(self, p, destdir, opts):
    """Download the binaries of package p into destdir/<tpackage>.

    Returns (toignore, downloads):
      - toignore: package names already present in the target project's
        'standard' repository, which the checker should not flag.
      - downloads: local paths of the fetched binaries.
    Returns ([], []) when the build is excluded, or when a disturl
    mismatch is detected (p.error is set in that case).
    """
    if p.build_excluded:
        return [], []

    p.destdir = destdir + "/%s" % p.tpackage
    if not os.path.isdir(p.destdir):
        os.makedirs(p.destdir, 0755)
    # we can assume x86_64 is there
    todownload = []
    for fn in self._check_repo_repo_list(p.sproject, p.goodrepo, 'x86_64', p.spackage, opts):
        todownload.append(('x86_64', fn[0]))

    # now fetch -32bit packs
    for fn in self._check_repo_repo_list(p.sproject, p.goodrepo, 'i586', p.spackage, opts):
        if fn[2] != 'x86_64': continue
        todownload.append(('i586', fn[0]))

    downloads = []
    for arch, fn in todownload:
        t = os.path.join(p.destdir, fn)
        self._check_repo_get_binary(opts.apiurl, p.sproject, p.goodrepo,
                                    arch, p.spackage, fn, t)
        downloads.append(t)
        if fn.endswith('.rpm'):
            # Query the DISTURL stamped into the rpm to verify the
            # binary was built from the submitted source revision.
            pid = subprocess.Popen(["rpm", "--nosignature", "--queryformat", "%{DISTURL}", "-qp", t],
                                   stdout=subprocess.PIPE, close_fds=True)
            os.waitpid(pid.pid, 0)[1]
            disturl = pid.stdout.readlines()

            # NOTE(review): disturl[0] raises IndexError if rpm prints
            # nothing — presumably rpm always emits DISTURL; confirm.
            if not os.path.basename(disturl[0]).startswith(p.rev):
                p.error = "disturl %s does not match revision %s" % (disturl[0], p.rev)
                return [], []

    # Collect what already exists in the target project so the
    # repo-checker does not complain about those packages.
    toignore = []
    for fn in self._check_repo_repo_list(p.tproject, 'standard', 'x86_64', p.tpackage, opts, ignore=True):
        toignore.append(fn[1])

    # now fetch -32bit pack list
    for fn in self._check_repo_repo_list(p.tproject, 'standard', 'i586', p.tpackage, opts, ignore=True):
        if fn[2] != 'x86_64': continue
        toignore.append(fn[1])
    return toignore, downloads
2013-04-16 23:10:04 +02:00
2013-06-20 17:30:54 +02:00
def _get_build_deps(self, prj, repo, arch, pkg, opts):
    """Return the names of the build dependencies (bdep) of pkg."""
    buildinfo = get_buildinfo(opts.apiurl, prj, pkg, repo, arch)
    deps = []
    for bdep in ET.fromstring(buildinfo).findall('bdep'):
        deps.append(bdep.attrib['name'])
    return deps
def _get_base_build_bin(self, opts):
    """Return per-arch sets of binary names in Base:build."""
    binaries = {}
    for arch in ('x86_64', 'i586'):
        url = makeurl(opts.apiurl, ['/build/Base:build/standard/%s/_repository' % arch, ])
        root = ET.parse(http_GET(url)).getroot()
        # Strip the trailing '.rpm' from every filename.
        binaries[arch] = set(e.attrib['filename'][:-4] for e in root.findall('binary'))
    return binaries
def _get_base_build_src(self, opts):
    """Return the set of source package names in Base:build."""
    url = makeurl(opts.apiurl, ['/source/Base:build', ])
    root = ET.parse(http_GET(url)).getroot()
    return set(entry.attrib['name'] for entry in root.findall('entry'))
2013-07-22 14:29:40 +02:00
def _check_repo_group(self, id_, reqs, opts):
    """Run the repo checker over one group of requests.

    All requests in the group must have built successfully; their
    binaries are downloaded, checked against Base:build and missing
    dependencies, and finally fed to the external repo-checker.pl
    script.  Reviews are moved to 'new' or 'accepted' accordingly.
    """
    print '\nCheck group', reqs
    # Bail out early unless every request in the group built fine.
    if not all(self._check_repo_buildsuccess(r, opts) for r in reqs):
        return
    # all succeeded
    toignore, downloads = [], []
    destdir = os.path.expanduser('~/co/%s' % str(reqs[0].group))
    # Track which group members were covered by the reqs we received.
    fetched = {r: False for r in opts.groups.get(id_, [])}
    goodrepo = ''
    packs = []
    for p in reqs:
        i, d = self._check_repo_download(p, destdir, opts)
        if p.error:
            print p.error
            p.updated = True
            self._check_repo_change_review_state(opts, p.request, 'new', message=p.error)
            return
        downloads.extend(d)
        toignore.extend(i)
        fetched[p.request] = True
        goodrepo = p.goodrepo
        packs.append(p)

    # Fetch any group member that was not part of reqs, and download
    # its binaries too (errors here are informational only).
    for req, f in fetched.items():
        if not f:
            packs.extend(self._check_repo_fetch_request(req, opts))
    for p in packs:
        p.goodrepo = goodrepo
        i, d = self._check_repo_download(p, destdir, opts)
        if p.error:
            print 'ALREADY ACEPTED:', p.error
            p.updated = True
        downloads.extend(d)
        toignore.extend(i)

    # Get all the Base:build packages (source and binary)
    base_build_bin = self._get_base_build_bin(opts)
    base_build_src = self._get_base_build_src(opts)
    for p in reqs:
        # Be sure that if the package is in Base:build, all the
        # dependecies are also in Base:build
        if p.spackage in base_build_src:
            # TODO - Check all the arch for this package
            for arch in ('x86_64', 'i586'):
                build_deps = set(self._get_build_deps(p.sproject, p.goodrepo, arch, p.spackage, opts))
                outliers = build_deps - base_build_bin[arch]
                if outliers:
                    print 'OUTLIERS (%s)' % arch, outliers
                    msg = 'This package is a Base:build and one of the dependencies is outside Base:build (%s)' % (', '.join(outliers))
                    # Review update deliberately disabled for now:
                    # self._check_repo_change_review_state(opts, p.request, 'new', message=msg)
                    print 'NON-(FIX)-UPDATED', msg
                    return

    # Hold the review while build dependencies are still missing from
    # the target project (unless they are part of this same group).
    for p in reqs:
        smissing = []
        for package in p.missings:
            alreadyin = False
            print package, packs
            for t in packs:
                if package == t.tpackage: alreadyin = True
            if alreadyin:
                continue
            print package, packs, downloads, toignore
            request = self._check_repo_find_submit_request(opts, p.tproject, package)
            if request:
                greqs = opts.groups.get(p.group, [])
                if request in greqs: continue
                package = "%s(rq%s)" % (package, request)
            smissing.append(package)
        if len(smissing):
            msg = "please make sure to wait before these depencencies are in {0}: {1}".format(p.tproject, ', '.join(smissing))
            self._check_repo_change_review_state(opts, p.request, 'new', message=msg)
            print 'UPDATED', msg
            return

    # Prune stale files/dirs left over from previous runs.
    for dirname, dirnames, filenames in os.walk(destdir):
        if len(dirnames) + len(filenames) == 0:
            os.rmdir(dirname)
        for filename in filenames:
            fn = os.path.join(dirname, filename)
            if not fn in downloads:
                os.unlink(fn)

    # Run the external repo checker over the downloaded binaries.
    civs = "LC_ALL=C perl /suse/coolo/checker/repo-checker.pl '%s' '%s' 2>&1" % (destdir, ','.join(toignore))
    # exit(1)
    p = subprocess.Popen(civs, shell=True, stdout=subprocess.PIPE, close_fds=True)
    # ret = os.waitpid(p.pid, 0)[1]
    output, _ = p.communicate()
    ret = p.returncode
    updated = dict()
    if ret:
        # Checker failed: push its output back to every request once.
        print output, set(map(lambda x: x.request, reqs))
        for p in reqs:
            if updated.get(p.request, False) or p.updated: continue
            self._check_repo_change_review_state(opts, p.request, 'new', message=output)
            updated[p.request] = 1
            p.updated = True
        return
    for p in reqs:
        if updated.get(p.request, False) or p.updated: continue
        msg = "Builds for repo %s" % p.goodrepo
        self._check_repo_change_review_state(opts, p.request, 'accepted', message=msg)
        updated[p.request] = 1
        p.updated = True
    shutil.rmtree(destdir)
2013-06-20 17:30:54 +02:00
def _check_repo_fetch_request(self, id_, opts):
    """Fetch request id_ from the API and expand it into packages."""
    url = makeurl(opts.apiurl, ['request', str(id_)])
    request_xml = ET.parse(http_GET(url)).getroot()
    return self._check_repo_one_request(request_xml, opts)
2013-04-18 13:33:25 +02:00
2013-07-22 16:15:32 +02:00
@cmdln.alias('check', 'cr')
@cmdln.option('-s', '--skip', action='store_true', help='skip review')
def do_check_repo(self, subcmd, opts, *args):
    """${cmd_name}: Checker review of submit requests.

    Usage:
       ${cmd_name} [SRID]...
           Shows pending review requests and their current state.
    ${cmd_option_list}
    """
    # Shared state for the whole run.
    opts.mode = ''
    opts.groups = {}
    opts.grouped = {}
    opts.verbose = False
    opts.apiurl = self.get_api_url()

    if opts.skip:
        # --skip: accept the review for the given request ids.
        if not args:
            raise oscerr.WrongArgs('Please give, if you want to skip a review specify a SRID')
        for id_ in args:
            self._check_repo_change_review_state(opts, id_, 'accepted', message='skip review')
        return

    ids = [arg for arg in args if arg.isdigit()]

    packs = []
    if ids:
        # we have a list, use them.
        for id_ in ids:
            packs.extend(self._check_repo_fetch_request(id_, opts))
    else:
        # xpath query, using the -m, -r, -s options
        where = "@by_user='factory-repo-checker'+and+@state='new'"
        url = makeurl(opts.apiurl, ['search', 'request'],
                      "match=state/@name='review'+and+review[" + where + "]")
        root = ET.parse(http_GET(url)).getroot()
        for rq in root.findall('request'):
            packs.extend(self._check_repo_one_request(rq, opts))

    # Bucket the packages by the group they belong to, then check
    # every group as a unit.
    groups = {}
    for p in packs:
        groups.setdefault(p.group, []).append(p)

    for id_, reqs in groups.items():
        self._check_repo_group(id_, reqs, opts)