2014-05-26 16:03:06 +02:00
# Copyright (C) 2014 SUSE Linux Products GmbH
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
2014-06-25 10:42:31 +02:00
import os
import re
import subprocess
import urllib2
from collections import defaultdict
from pprint import pformat
from urllib import quote_plus
from xml.etree import cElementTree as ET

from osc.core import get_binary_file
from osc.core import http_DELETE
from osc.core import http_GET
from osc.core import http_POST
from osc.core import makeurl
from osclib.stagingapi import StagingAPI
from osclib.memoize import memoize
from osclib.pkgcache import PkgCache
2014-05-26 16:03:06 +02:00
2014-06-25 10:42:31 +02:00
# Directory where download binary packages.
2014-09-15 13:32:02 +02:00
# Directory where downloaded binary packages are cached.
# Note: the path must start with '~' (no surrounding whitespace) or
# os.path.expanduser() will not expand it to the user's home.
BINCACHE = os.path.expanduser('~/co')
DOWNLOADS = os.path.join(BINCACHE, 'downloads')
2014-06-25 10:42:31 +02:00
2014-06-12 18:25:45 +02:00
class Request(object):
    """Simple request container.

    Holds the fields of an OBS submit/delete request that the repo
    checker cares about, either passed explicitly or parsed from an
    ElementTree request XML element via load().
    """

    def __init__(self, request_id=None, src_project=None,
                 src_package=None, tgt_project=None, tgt_package=None,
                 revision=None, srcmd5=None, verifymd5=None,
                 group=None, goodrepos=None, missings=None,
                 is_shadow=None, shadow_src_project=None,
                 element=None):

        self.request_id = request_id
        self.src_project = src_project
        self.src_package = src_package
        self.tgt_project = tgt_project
        self.tgt_package = tgt_package
        self.revision = revision
        self.srcmd5 = srcmd5
        self.verifymd5 = verifymd5
        self.group = group
        self.goodrepos = goodrepos if goodrepos else []
        self.missings = missings if missings else {}
        self.is_shadow = is_shadow
        self.shadow_src_project = shadow_src_project

        self.updated = False
        self.error = None
        self.build_excluded = False
        self.action_type = 'submit'  # assume default
        # Fix: downloads is used as a mapping of
        # (project, repo, disturl) -> [files] by CheckRepo._download(),
        # so it must be a defaultdict(list), not a plain list.
        self.downloads = defaultdict(list)
        self.is_shadow_devel = False
        # Packages that only build on i586; used when evaluating the
        # 'excluded' build state.
        self.i686_only = ['glibc.i686']

        if element:
            self.load(element)

    def load(self, element):
        """Load a node from a ElementTree request XML element."""
        self.request_id = int(element.get('id'))

        action = element.find('action')
        self.action_type = action.get('type')
        source = action.find('source')
        if source is not None:
            self.src_project = source.get('project')
            self.src_package = source.get('package')
            self.revision = source.get('rev')
        target = action.find('target')
        if target is not None:
            self.tgt_project = target.get('project')
            self.tgt_package = target.get('package')

        # The groups are in the CheckRepo object.
        self.group = self.request_id

        # Assigned in is_buildsuccess
        self.goodrepos = []
        self.missings = {}

        # Detect if the request comes from Factory to a openSUSE
        # release, and adjust the source and target projects
        _is_product = re.match(r'openSUSE:\d{2}.\d', self.tgt_project)
        if self.src_project == 'openSUSE:Factory' and _is_product:
            self.is_shadow_devel = True
            self.shadow_src_project = '%s:Devel' % self.tgt_project
        else:
            self.is_shadow_devel = False
            self.shadow_src_project = self.src_project

    def str_compact(self):
        """Return a short one-line description of the request."""
        shadow = ('Shadow via %s' % self.shadow_src_project) if self.is_shadow_devel else ''
        if self.action_type == 'delete':
            return '#[%s] DELETE (%s) %s' % (self.request_id, self.tgt_package, shadow)
        return '#[%s](%s) %s' % (self.request_id, self.src_package, shadow)

    def __repr__(self):
        shadow = ('Shadow via %s' % self.shadow_src_project) if self.is_shadow_devel else ''
        if self.action_type == 'delete':
            return '#[%s] DELETE -> %s/%s %s' % (
                self.request_id,
                self.tgt_project,
                self.tgt_package,
                shadow)
        return '#[%s] %s/%s -> %s/%s %s' % (
            self.request_id,
            self.src_project,
            self.src_package,
            self.tgt_project,
            self.tgt_package,
            shadow)
2014-06-12 18:25:45 +02:00
2014-05-26 16:03:06 +02:00
class CheckRepo ( object ) :
2015-02-26 10:57:06 +01:00
def __init__ ( self , apiurl , project , readonly = False , force_clean = False , debug = False ) :
2014-05-26 16:03:06 +02:00
""" CheckRepo constructor. """
self . apiurl = apiurl
2015-02-26 10:57:06 +01:00
self . project = project
2015-02-19 10:57:55 +01:00
self . staging = StagingAPI ( apiurl , self . project )
2014-05-26 16:03:06 +02:00
2014-10-14 13:06:32 +02:00
self . pkgcache = PkgCache ( BINCACHE , force_clean = force_clean )
2014-10-01 17:41:26 +02:00
2014-05-26 16:03:06 +02:00
# grouped = { id: staging, }
self . grouped = { }
# groups = { staging: [ids,], }
self . groups = { }
self . _staging ( )
2014-09-01 09:43:44 +02:00
self . readonly = readonly
2014-08-28 14:42:12 +02:00
self . debug_enable = debug
def debug ( self , * args ) :
if not self . debug_enable :
return
2014-09-01 09:43:44 +02:00
print ' ' . join ( [ i if isinstance ( i , basestring ) else pformat ( i ) for i in args ] )
2014-05-26 16:03:06 +02:00
def _staging ( self ) :
2014-06-12 18:25:45 +02:00
""" Preload the groups of related request associated by the same
2014-05-26 16:03:06 +02:00
staging project .
2014-06-12 18:25:45 +02:00
2014-05-26 16:03:06 +02:00
"""
for project in self . staging . get_staging_projects ( ) :
# Get all the requests identifier for the project
2014-07-21 11:10:46 +02:00
requests = self . staging . get_prj_pseudometa ( project ) [ ' requests ' ]
requests = [ req [ ' id ' ] for req in requests ]
2014-05-26 16:03:06 +02:00
# Note: Originally we recover also the request returned by
# list_requests_in_prj(). I guest that if the staging
# project is working properly, this method do not add any
# new request to the list.
if requests :
self . groups [ project ] = requests
self . grouped . update ( { req : project for req in requests } )
2014-07-23 16:45:31 +02:00
def get_request_state ( self , request_id ) :
""" Return the current state of the request. """
state = None
url = makeurl ( self . apiurl , ( ' request ' , str ( request_id ) ) )
try :
root = ET . parse ( http_GET ( url ) ) . getroot ( )
state = root . find ( ' state ' ) . get ( ' name ' )
except urllib2 . HTTPError , e :
print ( ' ERROR in URL %s [ %s ] ' % ( url , e ) )
return state
2014-08-01 10:00:23 +02:00
def get_review_state ( self , request_id ) :
""" Return the current review state of the request. """
states = [ ]
url = makeurl ( self . apiurl , ( ' request ' , str ( request_id ) ) )
try :
root = ET . parse ( http_GET ( url ) ) . getroot ( )
states = [ review . get ( ' state ' ) for review in root . findall ( ' review ' ) if review . get ( ' by_user ' ) == ' factory-repo-checker ' ]
except urllib2 . HTTPError , e :
print ( ' ERROR in URL %s [ %s ] ' % ( url , e ) )
return states [ 0 ] if states else ' '
2014-05-26 16:03:06 +02:00
def change_review_state ( self , request_id , newstate , message = ' ' ) :
""" Based on osc/osc/core.py. Fixed ' by_user ' . """
query = {
' cmd ' : ' changereviewstate ' ,
' newstate ' : newstate ,
2014-06-04 11:49:22 +02:00
# XXX TODO - We force the user here, check if the user
# expressed in .oscrc (with the password stored) have
# rights to become this user.
2014-05-26 16:03:06 +02:00
' by_user ' : ' factory-repo-checker ' ,
}
2014-08-01 10:00:23 +02:00
review_state = self . get_review_state ( request_id )
if review_state == ' accepted ' and newstate != ' accepted ' :
print ' - Avoid change state %s -> %s ( %s ) ' % ( review_state , newstate , message )
2014-07-23 16:45:31 +02:00
2014-05-26 16:03:06 +02:00
code = 404
2014-06-25 10:42:31 +02:00
url = makeurl ( self . apiurl , ( ' request ' , str ( request_id ) ) , query = query )
2014-09-01 09:43:44 +02:00
if self . readonly :
print ' DRY RUN: POST %s ' % url
return 200
2014-05-26 16:03:06 +02:00
try :
root = ET . parse ( http_POST ( url , data = message ) ) . getroot ( )
code = root . attrib [ ' code ' ]
except urllib2 . HTTPError , e :
print ( ' ERROR in URL %s [ %s ] ' % ( url , e ) )
return code
2014-06-04 14:12:37 +02:00
2014-08-18 17:18:00 +02:00
def get_request ( self , request_id , internal = False ) :
""" Get a request XML or internal object. """
2014-06-04 14:12:37 +02:00
request = None
try :
url = makeurl ( self . apiurl , ( ' request ' , str ( request_id ) ) )
request = ET . parse ( http_GET ( url ) ) . getroot ( )
2014-08-18 17:18:00 +02:00
if internal :
request = Request ( element = request )
2014-06-04 14:12:37 +02:00
except urllib2 . HTTPError , e :
print ( ' ERROR in URL %s [ %s ] ' % ( url , e ) )
return request
def pending_requests ( self ) :
""" Search pending requests to review. """
requests = [ ]
2014-08-13 09:46:37 +02:00
review = " @by_user= ' factory-repo-checker ' +and+@state= ' new ' "
2015-02-19 10:57:55 +01:00
target = " @project= ' {} ' " . format ( self . project )
target_nf = " @project= ' {} ' " . format ( self . staging . cnonfree )
2014-06-04 14:12:37 +02:00
try :
url = makeurl ( self . apiurl , ( ' search ' , ' request ' ) ,
2014-09-22 17:35:28 +02:00
" match=state/@name= ' review ' +and+review[ %s ]+and+(target[ %s ]+or+target[ %s ]) " % (
review , target , target_nf ) )
2014-06-04 14:12:37 +02:00
root = ET . parse ( http_GET ( url ) ) . getroot ( )
requests = root . findall ( ' request ' )
except urllib2 . HTTPError , e :
print ( ' ERROR in URL %s [ %s ] ' % ( url , e ) )
return requests
2014-06-05 17:44:02 +02:00
2014-07-21 14:33:48 +02:00
def find_request_id ( self , project , package ) :
""" Return a request id that is in new, review of accepted state for a
specific project / package .
"""
xpath = " (action/target/@project= ' %s ' and " \
" action/target/@package= ' %s ' and " \
" action/@type= ' submit ' and " \
" (state/@name= ' new ' or state/@name= ' review ' or " \
" state/@name= ' accepted ' )) " % ( project , package )
query = {
2014-07-24 11:01:30 +02:00
' match ' : xpath
2014-07-21 14:33:48 +02:00
}
request_id = None
try :
url = makeurl ( self . apiurl , ( ' search ' , ' request ' ) , query = query )
collection = ET . parse ( http_GET ( url ) ) . getroot ( )
for root in collection . findall ( ' request ' ) :
2014-07-24 11:01:30 +02:00
_request = Request ( element = root )
request_id = _request . request_id
2014-07-21 14:33:48 +02:00
except urllib2 . HTTPError , e :
print ( ' ERROR in URL %s [ %s ] ' % ( url , e ) )
return request_id
2014-07-01 09:28:29 +02:00
def _build ( self , project , repository , arch , package ) :
2014-06-05 17:44:02 +02:00
""" Return the build XML document from OBS. """
2014-06-06 10:47:06 +02:00
xml = ' '
2014-06-05 17:44:02 +02:00
try :
2014-06-16 17:57:40 +02:00
url = makeurl ( self . apiurl , ( ' build ' , project , repository , arch , package ) )
2014-06-05 17:44:02 +02:00
xml = http_GET ( url ) . read ( )
except urllib2 . HTTPError , e :
print ( ' ERROR in URL %s [ %s ] ' % ( url , e ) )
return xml
@memoize ( )
2014-07-01 09:28:29 +02:00
def build ( self , project , repository , arch , package ) :
""" Return the build XML document from OBS. """
return self . _build ( project , repository , arch , package )
def _last_build_success ( self , src_project , tgt_project , src_package , rev ) :
2014-06-05 17:44:02 +02:00
""" Return the last build success XML document from OBS. """
2014-06-06 10:47:06 +02:00
xml = ' '
2015-08-28 12:58:02 +02:00
url = makeurl ( self . apiurl ,
( ' build ' , src_project ,
' _result?lastsuccess&package= %s &pathproject= %s &srcmd5= %s ' % (
quote_plus ( src_package ) ,
quote_plus ( tgt_project ) ,
rev ) ) )
xml = http_GET ( url ) . read ( )
2014-06-05 17:44:02 +02:00
return xml
2014-06-12 18:25:45 +02:00
2014-07-01 09:28:29 +02:00
@memoize ( )
def last_build_success ( self , src_project , tgt_project , src_package , rev ) :
""" Return the last build success XML document from OBS. """
return self . _last_build_success ( src_project , tgt_project , src_package , rev )
2014-06-12 18:25:45 +02:00
def get_project_repos ( self , src_project , tgt_project , src_package , rev ) :
""" Read the repositories of the project from _meta. """
# XXX TODO - Shitty logic here. A better proposal is refactorize
# _check_repo_buildsuccess.
repos = [ ]
url = makeurl ( self . apiurl ,
( ' build ' , src_project ,
' _result?lastsuccess&package= %s &pathproject= %s &srcmd5= %s ' % (
quote_plus ( src_package ) ,
quote_plus ( tgt_project ) ,
rev ) ) )
try :
root = ET . parse ( http_GET ( url ) ) . getroot ( )
for element in root . findall ( ' repository ' ) :
archs = [ ( e . get ( ' arch ' ) , e . get ( ' result ' ) ) for e in element . findall ( ' arch ' ) ]
repos . append ( ( element . get ( ' name ' ) , archs ) )
except urllib2 . HTTPError , e :
print ( ' ERROR in URL %s [ %s ] ' % ( url , e ) )
return repos
def old_md5 ( self , src_project , tgt_project , src_package , rev ) :
""" Recollect old MD5 for a package. """
# XXX TODO - instead of fixing the limit, use endtime to make
# sure that we have the correct time frame.
limit = 20
query = {
' package ' : src_package ,
# 'code': 'succeeded',
' limit ' : limit ,
}
repositories = self . get_project_repos ( src_project ,
tgt_project ,
src_package , rev )
srcmd5_list = [ ]
for repository , archs in repositories :
for arch , status in archs :
if srcmd5_list :
break
if status not in ( ' succeeded ' , ' outdated ' ) :
continue
url = makeurl ( self . apiurl , ( ' build ' , src_project ,
repository , arch ,
' _jobhistory ' ) ,
query = query )
try :
root = ET . parse ( http_GET ( url ) ) . getroot ( )
srcmd5_list = [ e . get ( ' srcmd5 ' ) for e in root . findall ( ' jobhist ' ) ]
except urllib2 . HTTPError , e :
print ( ' ERROR in URL %s [ %s ] ' % ( url , e ) )
md5_set = set ( )
for srcmd5 in srcmd5_list :
query = {
' expand ' : 1 ,
' rev ' : srcmd5 ,
}
url = makeurl ( self . apiurl , ( ' source ' , src_project , src_package ) , query = query )
root = ET . parse ( http_GET ( url ) ) . getroot ( )
md5_set . add ( root . find ( ' linkinfo ' ) . get ( ' srcmd5 ' ) )
return md5_set
    def check_specs(self, request_id=None, request=None):
        """Check a single request and load the different SPECs files.

        This method have side effects, it can ACCEPT or DECLINE
        requests after some checks.

        :param request_id: id of the request (will be fetched from OBS).
        :param request: already-fetched request XML element.
        :returns: list of Request objects -- the main request plus one
            pseudo-request per extra .spec file; empty when the request
            was declined or could not be read.
        """
        requests = []

        if request_id:
            request_id = int(request_id)
            request = self.get_request(request_id)
        elif request:
            request_id = int(request.get('id'))
        else:
            raise Exception('Please, provide a request_id or a request XML object.')

        self.debug("check_specs", request_id)

        # Check that only one action is allowed in the request.
        actions = request.findall('action')
        if len(actions) > 1:
            msg = 'Only one action per request is supported'
            print('[DECLINED]', msg)
            self.change_review_state(request_id, 'declined', message=msg)
            return requests

        rq = Request(element=request)
        # Only 'submit' and 'delete' actions are checked; any other type
        # is accepted without review.
        if rq.action_type != 'submit' and rq.action_type != 'delete':
            msg = 'Unchecked request type %s' % rq.action_type
            print 'ACCEPTED', msg
            self.change_review_state(request_id, 'accepted', message=msg)
            return requests

        rq.group = self.grouped.get(request_id, request_id)
        requests.append(rq)

        if rq.action_type == 'delete':
            # only track the target package
            return requests

        # Get source information about the SR:
        #   - Source MD5
        #   - Entries (.tar.gz, .changes, .spec ...) and MD5
        query = {
            'rev': rq.revision,
            'expand': 1
        }
        try:
            url = makeurl(self.apiurl, ['source', rq.src_project, rq.src_package],
                          query=query)
            root = ET.parse(http_GET(url)).getroot()
        except urllib2.HTTPError, e:
            print 'ERROR in URL %s [%s]' % (url, e)
            return requests

        rq.srcmd5 = root.attrib['srcmd5']
        rq.verifymd5 = self._get_verifymd5(rq, rq.srcmd5)

        # Recover the .spec files (entry names minus the '.spec' suffix).
        specs = [en.attrib['name'][:-5] for en in root.findall('entry')
                 if en.attrib['name'].endswith('.spec')]

        # special case for glibc.i686, it have not the relevant specfile for glibc.i686
        # but must be add it to requests list as a dummy request, otherwise the state
        # has not be check and won't download it's binaries.
        if 'glibc' in specs:
            specs.append('glibc.i686')

        # source checker already validated it
        if rq.src_package in specs:
            specs.remove(rq.src_package)
        elif rq.tgt_package in specs:
            specs.remove(rq.tgt_package)
        else:
            msg = 'The name of the SPEC files %s do not match with the name of the package (%s)'
            msg = msg % (specs, rq.src_package)
            print('[DECLINED]', msg)
            self.change_review_state(request_id, 'declined', message=msg)
            rq.updated = True
            return requests

        # Makes sure that the .spec file builds properly.
        # In OBS the source container is the place where all the .spec
        # files and .tgz files are stored, and used to build a binary
        # package (.RPM) and a source package (.SRC.RPM)
        #
        # There are some rules in OBS here that we need to know:
        #
        #  - There must be a .spec file that have the same name that
        #    the source container. For example, if the source
        #    container is python3-Pillow, we need a
        #    python3-Pillow.spec file.
        #
        #  - If there are more .spec files, in case that we want to
        #  - build more packages, this is represented as a new source
        #  - container in OBS, that is a link to the original one but
        #  - with the name of the .spec file.
        for spec in specs:
            try:
                spec_info = self.staging.get_package_information(rq.src_project, spec)
            except urllib2.HTTPError as e:
                rq.error = "Can't gather package information for (%s, %s)" % (rq.src_project, spec)
                rq.updated = True
                continue
            except KeyError as e:
                # This exception happends some times when there is an
                # 'error' attribute in the package information XML
                rq.error = 'There is an error in the SPEC file for (%s, %s).' % (rq.src_project, spec)
                rq.updated = True
                continue

            # The extra spec container must be a _link to the submitted
            # package inside the same source project.
            is_src_diff = (spec_info['project'] != rq.src_project or
                           spec_info['package'] != rq.src_package)
            if is_src_diff and not rq.updated:
                msg = '%s/%s should _link to %s/%s' % (rq.src_project,
                                                       spec,
                                                       rq.src_project,
                                                       rq.src_package)
                print '[DECLINED]', msg
                self.change_review_state(rq.request_id, 'declined', message=msg)
                rq.updated = True

            # The linked container must point at the submitted revision, or
            # at least at one of its recent historic md5s.
            if spec_info['srcmd5'] != rq.srcmd5 and not rq.updated:
                if spec_info['srcmd5'] not in self.old_md5(rq.src_project,
                                                           rq.tgt_project,
                                                           spec,
                                                           rq.srcmd5):
                    msg = '%s/%s is a link but has a different md5sum than %s?' % (
                        rq.src_project,
                        spec,
                        rq.src_package)
                else:
                    msg = '%s is no longer the submitted version, please resubmit HEAD' % spec
                print '[WARNING] CHECK MANUALLY', msg
                # self.change_review_state(id_, 'declined', message=msg)
                rq.updated = True

            # Track the extra spec as a pseudo-request that shares the
            # group and md5s of the main one.
            sp = Request(request_id=rq.request_id,
                         src_project=rq.src_project,
                         src_package=spec,
                         tgt_project=rq.tgt_project,
                         tgt_package=spec,
                         revision=None,
                         srcmd5=rq.srcmd5,
                         verifymd5=rq.verifymd5,
                         group=rq.group,
                         is_shadow=rq.is_shadow,
                         shadow_src_project=rq.shadow_src_project)
            requests.append(sp)

        return requests
2014-06-16 17:57:40 +02:00
    def repositories_to_check(self, request):
        """Return the list of repositories that contains both Intel arch.

        Each repository is an XML ElementTree from last_build_success.

        """
        repos_to_check = []
        # Repositories that also build non-Intel archs but still contain
        # both i586 and x86_64; validated below against the binary list.
        more_repo_candidates = []

        try:
            root_xml = self.last_build_success(request.shadow_src_project,
                                               request.tgt_project,
                                               request.src_package,
                                               request.verifymd5)
        except urllib2.HTTPError as e:
            if 300 <= e.code <= 499:
                print '- The request is not built agains this project'
                return repos_to_check
            raise e

        root = ET.fromstring(root_xml)
        for repo in root.findall('repository'):
            valid_intel_repo = True
            intel_archs = []
            for a in repo.findall('arch'):
                if a.attrib['arch'] not in ('i586', 'x86_64'):
                    # It is not a common Factory i586/x86_64 build repository
                    # probably builds on ARM, PPC or images
                    valid_intel_repo = False
                else:
                    # We assume it is standard Factory i586/x86_64 build repository
                    intel_archs.append(a)

            if not valid_intel_repo:
                if len(intel_archs) == 2:
                    # the possible repo candidate ie. complex build repos layout includes i586 and x86_64
                    more_repo_candidates.append(repo)
                continue

            if len(intel_archs) == 2:
                repos_to_check.append(repo)

        if more_repo_candidates:
            for repo in more_repo_candidates:
                rpms = []
                # check if x86_64 package is exist
                rpms = self.get_package_list_from_repository(request.shadow_src_project, repo.attrib['name'], 'x86_64', request.src_package)
                if rpms:
                    # valid candidate
                    repos_to_check.append(repo)

        return repos_to_check
2014-06-25 10:42:31 +02:00
def is_binary ( self , project , repository , arch , package ) :
2014-06-16 17:57:40 +02:00
""" Return True if is a binary package. """
2014-06-25 10:42:31 +02:00
root_xml = self . build ( project , repository , arch , package )
2014-06-16 17:57:40 +02:00
root = ET . fromstring ( root_xml )
for binary in root . findall ( ' binary ' ) :
# If there are binaries, we're out.
return False
return True
2014-07-22 17:19:17 +02:00
def _get_binary_file ( self , project , repository , arch , package , filename , target , mtime ) :
2014-07-21 14:33:48 +02:00
""" Get a binary file from OBS. """
2014-07-22 17:19:17 +02:00
# Check if the file is already there.
2014-10-01 17:41:26 +02:00
key = ( project , repository , arch , package , filename , mtime )
if key in self . pkgcache :
try :
os . unlink ( target )
except :
pass
self . pkgcache . linkto ( key , target )
else :
get_binary_file ( self . apiurl , project , repository , arch ,
filename , package = package ,
target_filename = target )
self . pkgcache [ key ] = target
2014-07-21 14:33:48 +02:00
2014-07-21 16:29:49 +02:00
    def _download(self, request, todownload):
        """Download the packages referenced in the 'todownload' list.

        Each entry of 'todownload' is a (project, repo, arch, filename,
        mtime) tuple.  RPMs are fetched first so the DISTURL -- and hence
        the per-DISTURL directory -- is known before the remaining files
        are placed.  Every file is symlinked into the DISTURL directory
        and registered in request.downloads under (project, repo, disturl).
        NOTE(review): this requires request.downloads to support mapping
        access with list values (defaultdict(list)) -- confirm against
        Request.__init__.
        """
        last_disturl = None
        last_disturldir = None

        # We need to order the files to download. First RPM packages (to
        # set disturl), after that the rest.
        todownload_rpm = [rpm for rpm in todownload if rpm[3].endswith('.rpm')]
        todownload_rest = [rpm for rpm in todownload if not rpm[3].endswith('.rpm')]

        for _project, _repo, arch, fn, mt in todownload_rpm:
            repodir = os.path.join(DOWNLOADS, request.src_package, _project, _repo)
            if not os.path.exists(repodir):
                os.makedirs(repodir)
            t = os.path.join(repodir, fn)
            self._get_binary_file(_project, _repo, arch, request.src_package, fn, t, mt)

            # Organize the files into DISTURL directories.
            disturl = self._md5_disturl(self._disturl(t))
            disturldir = os.path.join(repodir, disturl)
            last_disturl, last_disturldir = disturl, disturldir
            file_in_disturl = os.path.join(disturldir, fn)
            if not os.path.exists(disturldir):
                os.makedirs(disturldir)
            try:
                os.symlink(t, file_in_disturl)
            except:
                pass
                # print 'Found previous link.'

            request.downloads[(_project, _repo, disturl)].append(file_in_disturl)

        # Some subpackage do not have any rpm (e.g. rpmlint)
        if not last_disturldir:
            return

        # Non-RPM files are stored next to the last seen DISTURL.
        for _project, _repo, arch, fn, mt in todownload_rest:
            repodir = os.path.join(DOWNLOADS, request.src_package, _project, _repo)
            if not os.path.exists(repodir):
                os.makedirs(repodir)
            t = os.path.join(repodir, fn)
            self._get_binary_file(_project, _repo, arch, request.src_package, fn, t, mt)

            file_in_disturl = os.path.join(last_disturldir, fn)
            if last_disturldir:
                try:
                    os.symlink(t, file_in_disturl)
                except:
                    pass
                    # print 'Found previous link.'
            else:
                print "I don't know where to put", fn

            request.downloads[(_project, _repo, last_disturl)].append(file_in_disturl)
def _toignore ( self , request ) :
""" Return the list of files to ignore during the checkrepo. """
toignore = set ( )
for fn in self . get_package_list_from_repository (
request . tgt_project , ' standard ' , ' x86_64 ' , request . tgt_package ) :
if fn [ 1 ] :
toignore . add ( fn [ 1 ] )
# now fetch -32bit pack list
for fn in self . get_package_list_from_repository (
request . tgt_project , ' standard ' , ' i586 ' , request . tgt_package ) :
if fn [ 1 ] and fn [ 2 ] == ' x86_64 ' :
toignore . add ( fn [ 1 ] )
return toignore
2014-06-25 10:42:31 +02:00
def _disturl ( self , filename ) :
""" Get the DISTURL from a RPM file. """
2014-06-26 15:29:53 +02:00
pid = subprocess . Popen (
( ' rpm ' , ' --nosignature ' , ' --queryformat ' , ' % {DISTURL} ' , ' -qp ' , filename ) ,
stdout = subprocess . PIPE , close_fds = True )
2014-06-25 10:42:31 +02:00
os . waitpid ( pid . pid , 0 ) [ 1 ]
disturl = pid . stdout . readlines ( ) [ 0 ]
return disturl
def _md5_disturl ( self , disturl ) :
""" Get the md5 from the DISTURL from a RPM file. """
return os . path . basename ( disturl ) . split ( ' - ' ) [ 0 ]
2014-09-18 15:45:04 +02:00
@memoize ( session = True )
2014-06-25 10:42:31 +02:00
def _get_verifymd5 ( self , request , revision ) :
""" Return the verifymd5 attribute from a request. """
query = {
' view ' : ' info ' ,
' rev ' : revision ,
}
2014-06-26 15:29:53 +02:00
verifymd5 = ' '
2014-06-25 10:42:31 +02:00
try :
url = makeurl ( self . apiurl , ( ' source ' , request . src_project , request . src_package ) ,
query = query )
root = ET . parse ( http_GET ( url ) ) . getroot ( )
2014-06-26 15:29:53 +02:00
verifymd5 = root . attrib [ ' verifymd5 ' ]
2014-06-25 10:42:31 +02:00
except urllib2 . HTTPError , e :
print ' ERROR in URL %s [ %s ] ' % ( url , e )
2014-06-26 15:29:53 +02:00
return verifymd5
2014-06-25 10:42:31 +02:00
2014-07-01 19:55:17 +02:00
def check_disturl ( self , request , filename = None , md5_disturl = None ) :
2014-06-25 10:42:31 +02:00
""" Try to match the srcmd5 of a request with the one in the RPM package. """
2014-07-01 19:55:17 +02:00
if not filename and not md5_disturl :
raise ValueError ( ' Please, procide filename or md5_disturl ' )
2016-04-21 20:10:32 +08:00
# ugly workaround here, glibc.i686 had a topadd block in _link, and looks like
# it causes the disturl won't consistently with glibc even with the same srcmd5
if request . src_package == ' glibc.i686 ' :
return True
2014-07-01 19:55:17 +02:00
md5_disturl = md5_disturl if md5_disturl else self . _md5_disturl ( self . _disturl ( filename ) )
2014-07-04 10:41:12 +02:00
vrev_local = self . _get_verifymd5 ( request , md5_disturl )
2014-07-04 13:34:53 +02:00
2016-04-20 17:01:11 +08:00
# md5_disturl == request.srcmd5 is true for packages in the devel project.
# vrev_local == request.srcmd5 is true for kernel submission
# vrev_local == request.verifymd5 is ture for packages from different projects
if md5_disturl == request . srcmd5 or vrev_local in ( request . srcmd5 , request . verifymd5 ) :
2014-07-04 13:34:53 +02:00
return True
2016-04-20 17:01:11 +08:00
else :
msg = ' %s is no longer the submitted version in %s , please recheck! ' % ( request . src_package , request . src_project )
print ' [WARNING] CHECK MANUALLY ' , msg
2014-07-04 10:41:12 +02:00
2014-06-25 10:42:31 +02:00
return False
2014-06-16 17:57:40 +02:00
def is_buildsuccess ( self , request ) :
""" Return True if the request is correctly build
This method extend the Request object with the goodrepos
field .
: param request : Request object
: returns : True if the request is correctly build .
"""
2014-06-26 15:29:53 +02:00
# If the request do not build properly in both Intel platforms,
2014-06-16 17:57:40 +02:00
# return False.
2015-08-28 12:58:02 +02:00
try :
repos_to_check = self . repositories_to_check ( request )
except urllib2 . HTTPError as e :
if 500 < = e . code < = 599 :
print ' - Temporal error in OBS: %s %s ' % ( e . code , e . msg )
else :
print ' - Unknown error in OBS: %s %s ' % ( e . code , e . msg )
# Ignore this request until OBS error dissapears
request . updated = True
return False
2014-06-16 17:57:40 +02:00
if not repos_to_check :
msg = ' Missing i586 and x86_64 in the repo list '
2014-06-27 15:12:39 +02:00
print ' - %s ' % msg
2014-06-16 17:57:40 +02:00
self . change_review_state ( request . request_id , ' new ' , message = msg )
# Next line not needed, but for documentation.
request . updated = True
return False
2014-06-17 09:30:23 +02:00
result = False
2014-06-16 17:57:40 +02:00
alldisabled = True
foundbuilding = None
foundfailed = None
for repository in repos_to_check :
2015-02-24 17:57:19 +01:00
repo_name = repository . attrib [ ' name ' ]
2014-08-28 14:42:12 +02:00
self . debug ( " checking repo " , ET . tostring ( repository ) )
2014-06-16 17:57:40 +02:00
isgood = True
founddisabled = False
r_foundbuilding = None
r_foundfailed = None
2015-02-24 17:57:19 +01:00
missings = [ ]
2014-06-16 17:57:40 +02:00
for arch in repository . findall ( ' arch ' ) :
if arch . attrib [ ' arch ' ] not in ( ' i586 ' , ' x86_64 ' ) :
continue
2016-04-20 20:54:56 +08:00
if arch . attrib [ ' result ' ] == ' excluded ' :
if ( ( arch . attrib [ ' arch ' ] == ' x86_64 ' and request . src_package not in request . i686_only ) or
( arch . attrib [ ' arch ' ] == ' i586 ' and request . src_package in request . i686_only ) ) :
request . build_excluded = True
2014-06-16 17:57:40 +02:00
if ' missing ' in arch . attrib :
for package in arch . attrib [ ' missing ' ] . split ( ' , ' ) :
if not self . is_binary (
request . src_project ,
2015-02-24 17:57:19 +01:00
repo_name ,
2014-06-25 10:42:31 +02:00
arch . attrib [ ' arch ' ] ,
package ) :
2015-02-24 17:57:19 +01:00
missings . append ( package )
2014-06-16 17:57:40 +02:00
if arch . attrib [ ' result ' ] not in ( ' succeeded ' , ' excluded ' ) :
isgood = False
if arch . attrib [ ' result ' ] == ' disabled ' :
founddisabled = True
if arch . attrib [ ' result ' ] == ' failed ' or arch . attrib [ ' result ' ] == ' unknown ' :
# Sometimes an unknown status is equivalent to
# disabled, but we map it as failed to have a human
# check (no autoreject)
2015-02-24 17:57:19 +01:00
r_foundfailed = repo_name
2014-06-16 17:57:40 +02:00
if arch . attrib [ ' result ' ] == ' building ' :
2015-02-24 17:57:19 +01:00
r_foundbuilding = repo_name
2016-04-21 20:10:32 +08:00
# Ugly workaround: glibc.i686 had a 'topadd' block in its _link, which
# seems to make the disturl inconsistent with glibc even for the same
# srcmd5; the per-srcmd5 build state was outdated as well.
if request . src_package == ' glibc.i686 ' :
if ( ( arch . attrib [ ' arch ' ] == ' i586 ' and arch . attrib [ ' result ' ] == ' outdated ' ) or
( arch . attrib [ ' arch ' ] == ' x86_64 ' and arch . attrib [ ' result ' ] == ' excluded ' ) ) :
isgood = True
continue
2014-06-16 17:57:40 +02:00
if arch . attrib [ ' result ' ] == ' outdated ' :
2016-04-12 11:02:34 +02:00
msg = " %s ' s sources were changed after submission: the relevant binaries are not available (never built or binaries replaced). Please resubmit " % request . src_package
2016-04-19 21:47:26 +08:00
print ' [DECLINED] ' , msg
2014-06-16 17:57:40 +02:00
self . change_review_state ( request . request_id , ' declined ' , message = msg )
# Next line is not needed, but for documentation
request . updated = True
return False
if not founddisabled :
alldisabled = False
if isgood :
2015-02-24 17:57:19 +01:00
_goodrepo = ( request . src_project , repo_name )
2014-08-28 14:42:12 +02:00
self . debug ( " good repo " , _goodrepo )
2014-07-22 17:19:17 +02:00
if _goodrepo not in request . goodrepos :
request . goodrepos . append ( _goodrepo )
2014-06-16 17:57:40 +02:00
result = True
if r_foundbuilding :
foundbuilding = r_foundbuilding
if r_foundfailed :
foundfailed = r_foundfailed
2015-02-24 17:57:19 +01:00
if missings :
request . missings [ repo_name ] = missings
2014-06-16 17:57:40 +02:00
2016-02-25 02:59:11 +08:00
# Need to return if result is True at this point
# Otherwise, it would return False at some point, e.g. on an unknown status
if result :
return True
2014-06-16 17:57:40 +02:00
if alldisabled :
msg = ' %s is disabled or does not build against factory. Please fix and resubmit ' % request . src_package
2016-04-19 21:47:26 +08:00
print ' [DECLINED] ' , msg
2014-06-16 17:57:40 +02:00
self . change_review_state ( request . request_id , ' declined ' , message = msg )
# Next line not needed, but for documentation
request . updated = True
return False
2014-06-25 10:42:31 +02:00
2014-07-22 17:19:17 +02:00
if foundbuilding and ( request . src_package , foundbuilding ) not in request . goodrepos :
2014-06-16 17:57:40 +02:00
msg = ' %s is still building for repository %s ' % ( request . src_package , foundbuilding )
2014-06-27 15:12:39 +02:00
print ' - %s ' % msg
2014-06-26 15:29:53 +02:00
self . change_review_state ( request . request_id , ' new ' , message = msg )
# Next line not needed, but for documentation
request . updated = True
return False
2014-06-25 10:42:31 +02:00
2014-06-16 17:57:40 +02:00
if foundfailed :
msg = ' %s failed to build in repository %s - not accepting ' % ( request . src_package , foundfailed )
# failures might be temporary, so don't autoreject but wait for a human to check
2014-06-27 15:12:39 +02:00
print ' - %s ' % msg
2014-06-16 17:57:40 +02:00
self . change_review_state ( request . request_id , ' new ' , message = msg )
# Next line not needed, but for documentation
request . updated = True
return False
2016-02-25 02:59:11 +08:00
return False
2014-07-21 14:33:48 +02:00
def get_package_list_from_repository(self, project, repository, arch, package):
    """Return the list of RPM binaries built for `package`.

    Each entry is a (filename, package_name, rpm_arch, mtime) tuple; the
    special 'rpmlint.log' file is reported with empty name/arch fields.
    Debuginfo, debugsource and source RPMs are filtered out.  Any HTTP
    error from OBS results in an empty list.
    """
    rpm_re = re.compile(r'(.*)-([^-]*)-([^-]*)\.([^-\.]+)\.rpm')
    url = makeurl(self.apiurl, ('build', project, repository, arch, package))
    entries = []
    try:
        root = ET.parse(http_GET(url)).getroot()
    except urllib2.HTTPError:
        # Package list not available; treat as no binaries.
        return entries
    for binary in root.findall('binary'):
        fname = binary.attrib['filename']
        mtime = int(binary.attrib['mtime'])
        match = rpm_re.match(fname)
        if match is None:
            # Keep the lint log; drop every other non-RPM file.
            if fname == 'rpmlint.log':
                entries.append((fname, '', '', mtime))
            continue
        name, rpm_arch = match.group(1), match.group(4)
        if name.endswith(('-debuginfo', '-debuginfo-32bit', '-debugsource')):
            continue
        if rpm_arch == 'src':
            continue
        entries.append((fname, name, rpm_arch, mtime))
    return entries
2014-08-18 17:18:00 +02:00
def remove_link_if_shadow_devel ( self , request ) :
""" If the request is a shadow_devel (the reference is to a request
that is a link from the product to Factory ) , remove the link
to transform it as a normal request .
"""
if request . is_shadow_devel :
2014-08-21 17:00:37 +02:00
url = makeurl ( self . apiurl , ( ' source ' , request . shadow_src_project , request . src_package ) )
2014-08-27 13:22:23 +02:00
if self . readonly :
2014-09-01 09:43:44 +02:00
print ' DRY RUN: DELETE %s ' % url
2014-08-27 13:22:23 +02:00
else :
http_DELETE ( url )
2014-08-20 15:36:19 +02:00
for sub_prj , sub_pkg in self . staging . get_sub_packages ( request . src_package ,
request . shadow_src_project ) :
2014-08-21 17:00:37 +02:00
url = makeurl ( self . apiurl , ( ' source ' , sub_prj , sub_pkg ) )
2014-08-27 13:22:23 +02:00
if self . readonly :
2014-09-01 09:43:44 +02:00
print ' DRY RUN: DELETE %s ' % url
2014-08-27 13:22:23 +02:00
else :
http_DELETE ( url )
2014-08-21 17:00:37 +02:00
def _whatdependson(self, request):
    """Return the set of packages that build-depend on the target
    package of the request (reverse dependencies on i586 and x86_64).
    """
    query = {
        'package': request.tgt_package,
        'view': 'revpkgnames',
    }
    dependers = set()
    for arch in ('i586', 'x86_64'):
        url = makeurl(self.apiurl,
                      ('build', request.tgt_project, 'standard', arch, '_builddepinfo'),
                      query=query)
        root = ET.parse(http_GET(url)).getroot()
        for pkgdep in root.findall('.//pkgdep'):
            dependers.add(pkgdep.text)
    return dependers
2014-11-12 14:08:00 +01:00
def _builddepinfo(self, project, package):
    """Return the set of build dependencies of `package` in `project`
    (union over the i586 and x86_64 standard repositories).
    """
    deps = set()
    for arch in ('i586', 'x86_64'):
        url = makeurl(self.apiurl,
                      ('build', project, 'standard', arch, '_builddepinfo'),
                      query={'package': package})
        xml = ET.parse(http_GET(url)).getroot()
        deps.update(node.text for node in xml.findall('.//pkgdep'))
    return deps
2014-08-21 17:00:37 +02:00
def _maintainers(self, request):
    """Return the logins of the maintainers that own the binary
    targeted by the request (via the OBS owner search).
    """
    url = makeurl(self.apiurl, ('search', 'owner'),
                  query={'binary': request.tgt_package})
    root = ET.parse(http_GET(url)).getroot()
    maintainers = []
    for person in root.findall('.//person'):
        if person.get('role') == 'maintainer':
            maintainers.append(person.get('name'))
    return maintainers
def _author ( self , request ) :
""" Get the author of the request. """
2014-11-12 14:08:00 +01:00
query = {
' withhistory ' : 1 ,
}
url = makeurl ( self . apiurl , ( ' request ' , str ( request . request_id ) ) , query = query )
2014-08-21 17:00:37 +02:00
root = ET . parse ( http_GET ( url ) ) . getroot ( )
2014-10-31 10:24:06 +01:00
who = None
2014-08-21 17:00:37 +02:00
state = root . find ( ' state ' )
2014-10-31 10:24:06 +01:00
try :
if state . get ( ' name ' ) == ' new ' :
who = state . get ( ' who ' )
else :
who = root . find ( ' history ' ) . get ( ' who ' )
except Exception :
who = None
return who
2014-08-21 17:00:37 +02:00
2014-11-12 16:58:34 +01:00
def _project_maintainer(self, request):
    """Return the logins of the maintainers of the target project."""
    url = makeurl(self.apiurl, ('source', request.tgt_project, '_meta'))
    meta = ET.parse(http_GET(url)).getroot()
    return [person.get('userid')
            for person in meta.findall('.//person')
            if person.get('role') == 'maintainer']
2014-11-10 14:05:49 +01:00
def is_safe_to_delete(self, request):
    """Check whether a delete request can be accepted safely.

    Safe means that nothing in the target project build-depends on the
    package any more and that the request author is a package or
    project maintainer.  Returns an empty string when the delete is
    safe, otherwise a '. '-joined description of the problems found.
    """
    reasons = []
    maintainers = self._maintainers(request)
    prj_maintainers = self._project_maintainer(request)
    author = self._author(request)

    for depender in self._whatdependson(request):
        if request.tgt_package in self._builddepinfo(request.tgt_project, depender):
            reasons.append('%s still depends on %s in %s' % (
                depender, request.tgt_package, request.tgt_project))

    if author not in maintainers and author not in prj_maintainers:
        reasons.append('The author (%s) is not one of the maintainers (%s) or a project maintainer in %s' % (
            author, ', '.join(maintainers), request.tgt_project))

    return '. '.join(reasons)