Merge branch 'master' of git://gitorious.org/opensuse/osc

@@ -96,8 +96,8 @@ def run(prg):
        body = e.read()
        if getattr(prg.options, 'debug', None) or \
           getattr(prg.conf, 'config', {}).get('debug', None):
            print >>sys.stderr, e.hdrs
            print >>sys.stderr, body

        if e.code in [ 400, 403, 404, 500 ]:
            if '<summary>' in body:

@@ -119,7 +119,7 @@ def run(prg):
        print >>sys.stderr, e.msg
        if getattr(prg.options, 'debug', None) or \
           getattr(prg.conf, 'config', {}).get('debug', None):
            print >>sys.stderr, e.e
        return 1

    except (oscerr.WrongOptions, oscerr.WrongArgs), e:
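
The error path in run() above only dumps the server response headers and body when debugging is enabled, and it looks in two places for that flag: the parsed command-line options and the loaded configuration. A minimal standalone sketch of that double check, using a hypothetical options object and config dict rather than osc's real ones:

    class Options(object):
        debug = False           # stand-in for the parsed --debug flag

    conf_config = {'debug': 1}  # stand-in for prg.conf.config

    def debug_enabled(options, config):
        # enabled if either the CLI flag or the config file asks for it
        return bool(getattr(options, 'debug', None) or
                    config.get('debug', None))

    print(debug_enabled(Options(), conf_config))  # True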

osc/build.py (34)

@@ -231,27 +231,27 @@ def get_built_files(pacdir, pactype):

def get_repo(path):
    """Walks up path looking for any repodata directories.

    @param path path to a directory
    @return str path to repository directory containing repodata directory
    """
    oldDirectory = None
    currentDirectory = os.path.abspath(path)
    repositoryDirectory = None

    # while there are still parent directories
    while currentDirectory != oldDirectory:
        children = os.listdir(currentDirectory)

        if "repodata" in children:
            repositoryDirectory = currentDirectory
            break

        # ascend
        oldDirectory = currentDirectory
        currentDirectory = os.path.abspath(os.path.join(oldDirectory,
                                                        os.pardir))

    return repositoryDirectory
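
get_repo() is a plain upward directory walk: it checks each directory for a "repodata" child and stops once os.path.abspath(os.path.join(dir, os.pardir)) no longer changes, i.e. at the filesystem root. A self-contained sketch of the same idea, independent of osc and with illustrative paths only:

    import os

    def find_upwards(path, marker="repodata"):
        # walk towards the root until a directory containing `marker` is found
        old, cur = None, os.path.abspath(path)
        while cur != old:
            if marker in os.listdir(cur):
                return cur
            old, cur = cur, os.path.abspath(os.path.join(cur, os.pardir))
        return None

    # e.g. find_upwards('/srv/repos/distro/x86_64') would return the first
    # ancestor that contains a repodata/ subdirectory, or None.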

def get_prefer_pkgs(dirs, wanted_arch, type):
@@ -259,11 +259,11 @@ def get_prefer_pkgs(dirs, wanted_arch, type):
    from util import repodata, packagequery, cpio
    paths = []
    repositories = []

    suffix = '*.rpm'
    if type == 'dsc':
        suffix = '*.deb'

    for dir in dirs:
        # check for repodata
        repository = get_repo(dir)

@@ -271,15 +271,15 @@ def get_prefer_pkgs(dirs, wanted_arch, type):
            paths += glob.glob(os.path.join(os.path.abspath(dir), suffix))
        else:
            repositories.append(repository)

    packageQueries = packagequery.PackageQueries(wanted_arch)

    for repository in repositories:
        repodataPackageQueries = repodata.queries(repository)

        for packageQuery in repodataPackageQueries:
            packageQueries.add(packageQuery)

    for path in paths:
        if path.endswith('src.rpm'):
            continue

@@ -287,10 +287,10 @@ def get_prefer_pkgs(dirs, wanted_arch, type):
            continue
        packageQuery = packagequery.PackageQuery.query(path)
        packageQueries.add(packageQuery)

    prefer_pkgs = dict((name, packageQuery.path())
                       for name, packageQuery in packageQueries.iteritems())

    depfile = create_deps(packageQueries.values())
    cpio = cpio.CpioWrite()
    cpio.add('deps', '\n'.join(depfile))

@@ -582,7 +582,7 @@ def main(opts, argv):
    # OBS 1.5 and before has no downloadurl defined in buildinfo
    if bi.downloadurl:
        urllist.append(bi.downloadurl + '/%(extproject)s/%(extrepository)s/%(arch)s/%(filename)s')
    if not opts.cpio_bulk_download:
        urllist.append( '%(apiurl)s/build/%(project)s/%(repository)s/%(repoarch)s/%(repopackage)s/%(repofilename)s' )

    fetcher = Fetcher(cachedir = config['packagecachedir'],

@@ -623,9 +623,9 @@ def main(opts, argv):
        os.makedirs(os.path.join(pradir))
    if not os.path.exists(tffn):
        if opts.linksources:
            os.link(sffn, tffn)
        else:
            os.symlink(sffn, tffn)

    if bi.pacsuffix == 'rpm':
        if opts.no_verify or opts.noinit:

@@ -654,7 +654,7 @@ def main(opts, argv):
        else:
            print 'WARNING: deb packages get not verified, they can compromise your system !'
    else:
        print 'WARNING: unknown packages get not verified, they can compromise your system !'

    print 'Writing build configuration'
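
The urllist entries appended in main() above are %-style templates; the real values for project, repository, arch and filename are substituted later from a mapping. A tiny illustration of that substitution mechanism with invented values (only the placeholder keys come from the code above):

    template = '%(apiurl)s/build/%(project)s/%(repository)s/%(repoarch)s/%(repopackage)s/%(repofilename)s'
    values = {
        'apiurl': 'https://api.example.org',   # invented values, for illustration only
        'project': 'home:user',
        'repository': 'standard',
        'repoarch': 'x86_64',
        'repopackage': 'foo',
        'repofilename': 'foo-1.0-1.x86_64.rpm',
    }
    print(template % values)
    # https://api.example.org/build/home:user/standard/x86_64/foo/foo-1.0-1.x86_64.rpm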

osc/checker.py (151)

@@ -7,96 +7,95 @@ import rpm
import base64

class KeyError(Exception):
    def __init__(self, key, *args):
        Exception.__init__(self)
        self.args = args
        self.key = key
    def __str__(self):
        return ''+self.key+' :'+' '.join(self.args)

class Checker:
    def __init__(self):
        self.dbdir = mkdtemp(prefix='oscrpmdb')
        self.imported = {}
        rpm.addMacro('_dbpath', self.dbdir)
        self.ts = rpm.TransactionSet()
        self.ts.initDB()
        self.ts.openDB()
        self.ts.setVSFlags(0)
        #self.ts.Debug(1)

    def readkeys(self, keys=[]):
        rpm.addMacro('_dbpath', self.dbdir)
        for key in keys:
            self.readkey(key)

        rpm.delMacro("_dbpath")

    # python is an idiot
    # def __del__(self):
    #     self.cleanup()

    def cleanup(self):
        self.ts.closeDB()
        rmtree(self.dbdir)

    def readkey(self, file):
        if file in self.imported:
            return

        fd = open(file, "r")
        line = fd.readline()
        if line and line[0:14] == "-----BEGIN PGP":
            line = fd.readline()
            while line and line != "\n":
                line = fd.readline()
            if not line:
                raise KeyError(file, "not a pgp public key")
        else:
            raise KeyError(file, "not a pgp public key")

        key = ''
        line = fd.readline()
        while line:
            if line[0:12] == "-----END PGP":
                break
            line = line.rstrip()
            key += line
            line = fd.readline()
        fd.close()
        if not line or line[0:12] != "-----END PGP":
            raise KeyError(file, "not a pgp public key")

        bkey = base64.b64decode(key)

        r = self.ts.pgpImportPubkey(bkey)
        if r != 0:
            raise KeyError(file, "failed to import pubkey")
        self.imported[file] = 1

    def check(self, pkg):
        fd = os.open(pkg, os.O_RDONLY)
        hdr = self.ts.hdrFromFdno(fd)
        os.close(fd)

if __name__ == "__main__":
    import sys
    keyfiles = []
    pkgs = []
    for arg in sys.argv[1:]:
        if arg[-4:] == '.rpm':
            pkgs.append(arg)
        else:
            keyfiles.append(arg)

    checker = Checker()
    try:
        checker.readkeys(keyfiles)
        for pkg in pkgs:
            checker.check(pkg)
    except Exception, e:
        checker.cleanup()
        raise e

# vim: sw=4 et
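
Checker.readkey() above strips the ASCII armor by hand: it skips the "-----BEGIN PGP" line and the armor headers up to the first blank line, concatenates the base64 body until "-----END PGP", and decodes it before importing the key into the temporary rpm database. A compact sketch of just that parsing step, run against a made-up armored block (not a real key):

    import base64

    fake_key = "\n".join([
        "-----BEGIN PGP PUBLIC KEY BLOCK-----",   # made-up armor, not a real key
        "Version: example",
        "",
        "aGVsbG8gd29ybGQ=",                       # base64 of 'hello world'
        "-----END PGP PUBLIC KEY BLOCK-----",
    ])

    def armored_payload(text):
        # skip the BEGIN line and the armor headers (up to the blank line),
        # then collect the base64 body until the END line, as readkey() does
        lines = iter(text.splitlines())
        if not next(lines).startswith("-----BEGIN PGP"):
            raise ValueError("not a pgp public key")
        for line in lines:
            if line == "":
                break
        body = ""
        for line in lines:
            if line.startswith("-----END PGP"):
                break
            body += line.rstrip()
        return base64.b64decode(body)

    print(armored_payload(fake_key))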

@@ -28,11 +28,11 @@ MAN_FOOTER = r"""
Type 'osc help <subcommand>' for more detailed help on a specific subcommand.
.PP
For additional information, see
-* http://www.opensuse.org/Build_Service_Tutorial
-* http://www.opensuse.org/Build_Service/CLI
+* http://en.opensuse.org/Build_Service_Tutorial
+* http://en.opensuse.org/Build_Service/CLI
.PP
You can modify osc commands, or roll you own, via the plugin API:
-* http://www.opensuse.org/Build_Service/osc_plugins
+* http://en.opensuse.org/Build_Service/osc_plugins
.SH AUTHOR
osc was written by several authors. This man page is automatically generated.
"""

@@ -48,11 +48,11 @@ class Osc(cmdln.Cmdln):
    ${help_list}
    global ${option_list}
    For additional information, see
-   * http://www.opensuse.org/Build_Service_Tutorial
-   * http://www.opensuse.org/Build_Service/CLI
+   * http://en.opensuse.org/Build_Service_Tutorial
+   * http://en.opensuse.org/Build_Service/CLI

    You can modify osc commands, or roll you own, via the plugin API:
-   * http://www.opensuse.org/Build_Service/osc_plugins
+   * http://en.opensuse.org/Build_Service/osc_plugins
    """
    name = 'osc'
    conf = None

@@ -388,12 +388,12 @@ class Osc(cmdln.Cmdln):
        for p in meta_get_packagelist(apiurl, project):
            if p.startswith("_patchinfo:"):
                patchinfo = p

        if opts.force or not patchinfo:
            print "Creating initial patchinfo..."
            query='cmd=createpatchinfo'
            if args and args[0]:
                query += "&name=" + args[0]
            url = makeurl(apiurl, ['source', project], query=query)
            f = http_POST(url)
            for p in meta_get_packagelist(apiurl, project):

@@ -777,8 +777,8 @@ class Osc(cmdln.Cmdln):
        for p in pac:
            result = create_submit_request(apiurl, project, p)
            if not result:
                # sys.exit(result)
                sys.exit("submit request creation failed")
            sr_ids.append(result)

        # create submit requests for all found patchinfos

@@ -798,13 +798,13 @@ class Osc(cmdln.Cmdln):
                (actionxml, cgi.escape(opts.message or ""))
            u = makeurl(apiurl, ['request'], query='cmd=create')
            f = http_POST(u, data=xml)

            root = ET.parse(f).getroot()
            sr_ids.append(root.get('id'))

            print "Requests created: ",
            for i in sr_ids:
                print i,
            sys.exit('Successfull finished')

        elif len(args) <= 2:
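
The request-creation hunk above POSTs the request XML and then reads the id of the newly created request from the response with ElementTree. A standalone sketch of that last step, using an invented response body shaped like the <request id="..."> element the code expects:

    import xml.etree.ElementTree as ET

    # invented response body, for illustration only
    response = '<request id="12345" type="submit"></request>'

    root = ET.fromstring(response)
    request_id = root.get('id')
    print(request_id)  # 12345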

@@ -1589,7 +1589,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
                                   package, tproject)

        if r is None:
            print >>sys.stderr, 'ERROR: Attribute branch call came not back with a project.'
            sys.exit(1)

        print "Project " + r + " created."

@@ -1942,9 +1942,9 @@ Please submit there instead, or use --nodevelproject to force direct submission.
        args = slash_split(args)
        args = expand_proj_pack(args)

        ## FIXME:
        ## if there is only one argument, and it ends in .ymp
        ## then fetch it, Parse XML to get the first
        ## metapackage.group.repositories.repository.url
        ## and construct zypper cmd's for all
        ## metapackage.group.software.item.name

@@ -2698,7 +2698,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
            opts.name_filter = None
            opts.status_filter = None
            opts.vertical = None
-           self.do_prjresults('prjresults', opts, *args);
+           self.do_prjresults('prjresults', opts, *args)
            sys.exit(0)
        else:
            project = store_read_project(wd)

@@ -2819,21 +2819,21 @@ Please submit there instead, or use --nodevelproject to force direct submission.

    def print_repos(self):
        wd = os.curdir
        doprint = False
        if is_package_dir(wd):
            str = "package"
            doprint = True
        elif is_project_dir(wd):
            str = "project"
            doprint = True

        if doprint:
            print 'Valid arguments for this %s are:' % str
            print
            self.do_repos(None, None)
            print
        raise oscerr.WrongArgs('Missing arguments')

    @cmdln.alias('rbl')
    @cmdln.alias('rbuildlog')

@@ -2864,7 +2864,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
            raise oscerr.WrongArgs('Too many arguments.')
        else:
            project, package, repository, arch = args

        offset=0
        if opts.start:
            offset = int(opts.start)

@@ -2954,15 +2954,15 @@ Please submit there instead, or use --nodevelproject to force direct submission.
        wd = os.curdir
        args = slash_split(args)
        project = package = repository = arch = None

        if len(args) < 2:
            self.print_repos()

        if len(args) == 2: # 2
            if is_package_dir('.'):
                package = store_read_package(wd)
            else:
                raise oscerr.WrongArgs('package is not specified.')
            project = store_read_project(wd)
            apiurl = store_read_apiurl(wd)
            repository = args[0]

@@ -2982,9 +2982,9 @@ Please submit there instead, or use --nodevelproject to force direct submission.
        reason = root.find('explain').text
        print reason
        if reason == "meta change":
            print "changed keys:"
            for package in root.findall('packagechange'):
                print "  ", package.get('change'), package.get('key')


    # FIXME: the new osc syntax should allow to specify multiple packages

@@ -2997,9 +2997,9 @@ Please submit there instead, or use --nodevelproject to force direct submission.
        will be triggered when a certain package changes.
        This is no guarantee, since the new build might have changed dependencies.

        dependson shows the build dependencies inside of a project, valid for a
        given repository and architecture.
        NOTE: to see all binary packages, which can trigger a build you need to
        refer the buildinfo, since this command shows only the dependencies
        inside of a project.

@@ -3019,7 +3019,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
        wd = os.curdir
        args = slash_split(args)
        project = packages = repository = arch = reverse = None

        if len(args) < 2 and (is_package_dir('.') or is_project_dir('.')):
            self.print_repos()

@@ -3028,9 +3028,9 @@ Please submit there instead, or use --nodevelproject to force direct submission.

        if len(args) < 3: # 2
            if is_package_dir('.'):
                packages = [store_read_package(wd)]
            elif not is_project_dir('.'):
                raise oscerr.WrongArgs('Project and package is not specified.')
            project = store_read_project(wd)
            apiurl = store_read_apiurl(wd)
            repository = args[0]

@@ -3058,7 +3058,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
        for package in root.findall('package'):
            print package.get('name'), ":"
            for dep in package.findall('pkgdep'):
                print "  ", dep.text


    @cmdln.option('-x', '--extra-pkgs', metavar='PAC', action='append',

@@ -3090,7 +3090,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
        """
        wd = os.curdir
        args = slash_split(args)

        if len(args) < 2 and is_package_dir('.'):
            self.print_repos()

@@ -3151,7 +3151,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.

        wd = os.curdir
        args = slash_split(args)

        if len(args) < 2 and is_package_dir('.'):
            self.print_repos()

@@ -4123,7 +4123,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
            extra_limiter='attribute/@name="%s"' % (opts.limit_to_attribute)
        if not opts.substring:
            opts.exact = True

        role_filter=None
        if for_user:

@@ -4376,7 +4376,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
    def do_bugowner(self, subcmd, opts, *args):
        """${cmd_name}: Show bugowners of a project/package

            osc bugowner PRJ
            osc bugowner PRJ PKG

        Shortcut for osc maintainer -B [PRJ] PKG

@@ -4819,7 +4819,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.

        dir_oldpatched = { 'apiurl': p.apiurl, 'project': p.prjname, 'package': p.name, 'srcmd5': p.srcmd5 }
        dir_oldpatched['entries'] = [[f.name, f.md5] for f in p.filelist]

        query = { 'rev': linkinfo.srcmd5 }
        u = makeurl(p.apiurl, ['source', linkinfo.project, linkinfo.package], query=query)
        f = http_GET(u)

osc/conf.py (14)

@@ -156,7 +156,7 @@ apiurl = %(apiurl)s
# This is convenient when sharing the buildroot with ordinary userids
# on the host.
# This should not be 0
# build-uid =

# extra packages to install when building packages locally (osc build)
# this corresponds to osc build's -x option and can be overridden with that

@@ -325,12 +325,12 @@ def init_basicauth(config):

    import sys
    if sys.version_info < (2, 6):
        # HTTPS proxy is not supported in old urllib2. It only leads to an error
        # or, at best, a warning.
        if 'https_proxy' in os.environ:
            del os.environ['https_proxy']
        if 'HTTPS_PROXY' in os.environ:
            del os.environ['HTTPS_PROXY']

    if config['http_debug']:
        # brute force

osc/core.py (26)

@@ -1727,7 +1727,7 @@ def parse_buildlogurl(buildlogurl):
    m = BUILDLOGURL_RE.match(buildlogurl)
    if not m:
        raise oscerr.WrongArgs('\'%s\' does not look like url with a build log' % buildlogurl)

    return (m.group('apiurl'), m.group('project'), m.group('package'), m.group('repository'), m.group('arch'))

def slash_split(l):

@@ -2139,17 +2139,17 @@ def show_attribute_meta(apiurl, prj, pac, subpac, attribute, with_defaults, with
    path.append('source')
    path.append(prj)
    if pac:
        path.append(pac)
    if pac and subpac:
        path.append(subpac)
    path.append('_attribute')
    if attribute:
        path.append(attribute)
    query=[]
    if with_defaults:
        query.append("with_default=1")
    if with_project:
        query.append("with_project=1")
    url = makeurl(apiurl, path, query)
    try:
        f = http_GET(url)

@@ -2547,10 +2547,10 @@ def create_submit_request(apiurl,
    # Yes, this kind of xml construction is horrible
    targetxml = ""
    if dst_project:
        packagexml = ""
        if dst_package:
            packagexml = """package="%s" """ %( dst_package )
        targetxml = """<target project="%s" %s /> """ %( dst_project, packagexml )
    # XXX: keep the old template for now in order to work with old obs instances
    xml = """\
<request type="submit">

@@ -3496,7 +3496,9 @@ def get_results(apiurl, prj, package, lastbuild=None, repository=[], arch=[]):
        rmap['status'] = ''

        if rmap['status'] in ['expansion error', 'broken', 'blocked', 'finished']:
-           rmap['status'] += ': ' + statusnode.find('details').text
+           details = statusnode.find('details')
+           if details != None:
+               rmap['status'] += ': ' + details.text

        if rmap['dirty'] == 'true':
            rmap['status'] = 'state is outdated (was: %s)' % rmap['status']

@@ -3644,7 +3646,7 @@ def get_prj_results(apiurl, prj, hide_legend=False, csv=False, status_filter=Non
    for i in range(0, len(targets)):
        line.append(str(i%10))
    r.append(' '.join(line))

    r.append('')

    if not hide_legend and len(pacs):
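
create_submit_request() above assembles the <target .../> element with string templates, and the code itself calls that kind of xml construction horrible. For comparison, a hedged sketch of building the same fragment with xml.etree.ElementTree; this is only an illustration, not how osc actually does it:

    import xml.etree.ElementTree as ET

    def target_xml(dst_project, dst_package=None):
        # build <target project="..." package="..."/> without manual escaping
        target = ET.Element('target', project=dst_project)
        if dst_package:
            target.set('package', dst_package)
        return ET.tostring(target)

    print(target_xml('openSUSE:Factory', 'osc'))
    # <target project="openSUSE:Factory" package="osc" />  (exact serialization may vary)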

@@ -114,9 +114,9 @@ class Fetcher:
        pkgq = packagequery.PackageQuery.query(tmpfile, extra_rpmtags=(1044, 1051, 1052))
        arch = pkgq.arch()
        # SOURCERPM = 1044
-       if pkgq.filename_suffix == 'rpm' and not pkgq.getTag(1044):
+       if pkgq.filename_suffix == 'rpm' and not pkgq.gettag(1044):
            # NOSOURCE = 1051, NOPATCH = 1052
-           if pkgq.getTag(1051) or pkgq.getTag(1052):
+           if pkgq.gettag(1051) or pkgq.gettag(1052):
                arch = "nosrc"
            else:
                arch = "src"
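
The Fetcher hunk classifies a fetched rpm from three header tags: SOURCERPM (1044) is missing only on source packages, while NOSOURCE (1051) and NOPATCH (1052) mark nosrc packages. A small sketch of that decision table over a plain dict standing in for the parsed header (tag numbers as in the code above, sample data invented):

    SOURCERPM, NOSOURCE, NOPATCH = 1044, 1051, 1052

    def classify(tags, default_arch):
        # tags: {tag_number: value} for the tags present in the rpm header
        if tags.get(SOURCERPM):
            return default_arch              # ordinary binary rpm
        if tags.get(NOSOURCE) or tags.get(NOPATCH):
            return "nosrc"
        return "src"

    print(classify({SOURCERPM: 'foo-1.0-1.src.rpm'}, 'x86_64'))  # x86_64
    print(classify({NOSOURCE: [1]}, 'x86_64'))                   # nosrc
    print(classify({}, 'x86_64'))                                # src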

@@ -176,7 +176,7 @@ class myHTTPSHandler(M2Crypto.m2urllib2.HTTPSHandler):

        # Our change: Check to see if we're using a proxy.
        # Then create an appropriate ssl-aware connection.
        full_url = req.get_full_url()
        target_host = urlparse.urlparse(full_url)[1]

        if (target_host != host):

@@ -2,6 +2,7 @@

class NoSecureSSLError(Exception):
    def __init__(self, msg):
        Exception.__init__(self)
        self.msg = msg
    def __str__(self):
        return self.msg

@@ -104,7 +104,7 @@ class DebQuery(packagequery.PackageQuery):
    def requires(self):
        return self.fields['depends']

-   def getTag(self, num):
+   def gettag(self, num):
        return self.fields.get(num, None)

    @staticmethod

@@ -9,36 +9,36 @@ class PackageQueries(dict):
    package query, to a name, the package is evaluated to see if it matches the
    wanted architecture and if it has a greater version than the current value.
    """

    # map debian arches to common obs arches
    architectureMap = {'i386': ['i586', 'i686'], 'amd64': ['x86_64']}

-   def __init__(self, wantedArchitecture):
-       self.wantedArchitecture = wantedArchitecture
+   def __init__(self, wanted_architecture):
+       self.wanted_architecture = wanted_architecture
        super(PackageQueries, self).__init__()

    def add(self, query):
        """Adds package query to dict if it is of the correct architecture and
        is newer (has a greater version) than the currently assigned package.

        @param a PackageQuery
        """
        self.__setitem__(query.name(), query)

    def __setitem__(self, name, query):
        if name != query.name():
            raise ValueError("key '%s' does not match "
                             "package query name '%s'" % (name, query.name()))

        architecture = query.arch()

-       if (architecture in [self.wantedArchitecture, 'noarch', 'all'] or
-           self.wantedArchitecture in self.architectureMap.get(architecture,
+       if (architecture in [self.wanted_architecture, 'noarch', 'all'] or
+           self.wanted_architecture in self.architectureMap.get(architecture,
                                                                 [])):
-           currentQuery = self.get(name)
+           current_query = self.get(name)

            # if current query does not exist or is older than this new query
-           if currentQuery is None or currentQuery.vercmp(query) <= 0:
+           if current_query is None or current_query.vercmp(query) <= 0:
                super(PackageQueries, self).__setitem__(name, query)
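
PackageQueries is a dict keyed by package name that only stores a candidate when its architecture fits and it compares at least as new as the stored entry (vercmp). A stripped-down sketch of that keep-the-newest rule with plain version tuples instead of real PackageQuery objects:

    class Newest(dict):
        # keep, per name, only the entry with the highest version
        def add(self, name, version):
            current = self.get(name)
            if current is None or current <= version:
                dict.__setitem__(self, name, version)

    n = Newest()
    n.add('foo', (1, 0))
    n.add('foo', (1, 2))
    n.add('foo', (1, 1))
    print(n)   # {'foo': (1, 2)}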

class PackageQuery:
@@ -63,20 +63,20 @@ class PackageQuery:

    def description(self):
        raise NotImplementedError

    def path(self):
        raise NotImplementedError

    def provides(self):
        raise NotImplementedError

    def requires(self):
        raise NotImplementedError

-   def getTag(self):
+   def gettag(self):
        raise NotImplementedError

-   def vercmp(self, pkgq):
+   def vercmp(self, pkgquery):
        raise NotImplementedError

    @staticmethod
@@ -85,20 +85,20 @@ class PackageQuery:
        magic = f.read(7)
        f.seek(0)
        extra_tags = ()
-       pkgq = None
+       pkgquery = None
        if magic[:4] == '\xed\xab\xee\xdb':
            import rpmquery
-           pkgq = rpmquery.RpmQuery(f)
+           pkgquery = rpmquery.RpmQuery(f)
            extra_tags = extra_rpmtags
        elif magic == '!<arch>':
            import debquery
-           pkgq = debquery.DebQuery(f)
+           pkgquery = debquery.DebQuery(f)
            extra_tags = extra_debtags
        else:
            raise PackageError('unsupported package type. magic: \'%s\' (%s)' % (magic, filename))
-       pkgq.read(all_tags, *extra_tags)
+       pkgquery.read(all_tags, *extra_tags)
        f.close()
-       return pkgq
+       return pkgquery

if __name__ == '__main__':
    import sys
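
PackageQuery.query() above dispatches on the first bytes of the file: rpm packages start with the lead magic ed ab ee db, while .deb packages are ar archives beginning with "!<arch>". A minimal sketch of that sniffing step on its own, fed with byte strings instead of an open file:

    RPM_MAGIC = b'\xed\xab\xee\xdb'
    AR_MAGIC = b'!<arch>'

    def package_kind(head):
        # head: the first few bytes of the file
        if head[:4] == RPM_MAGIC:
            return 'rpm'
        if head[:7] == AR_MAGIC:
            return 'deb'
        return 'unknown'

    print(package_kind(b'\xed\xab\xee\xdb' + b'\x03\x00'))   # rpm
    print(package_kind(b'!<arch>\ndebian-binary   '))        # deb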

@@ -25,7 +25,7 @@ OPERATOR_BY_FLAGS = {

def primaryPath(directory):
    """Returns path to the primary repository data file.

    @param directory repository directory that contains the repodata subdirectory
    @return str path to primary repository data file
    @raise IOError if repomd.xml contains no primary location

@@ -33,7 +33,7 @@ def primaryPath(directory):
    metaDataPath = os.path.join(directory, "repodata", "repomd.xml")
    elementTree = ET.parse(metaDataPath)
    root = elementTree.getroot()

    for dataElement in root:
        if dataElement.get("type") == "primary":
            locationElement = dataElement.find(namespace("repo") + "location")

@@ -43,110 +43,110 @@ def primaryPath(directory):
            break
    else:
        raise IOError("'%s' contains no primary location" % metaDataPath)

    return primaryPath

def queries(directory):
    """Returns a list of RepoDataQueries constructed from the repodata under
    the directory.

    @param directory path to a repository directory (parent directory of
                     repodata directory)
    @return list of RepoDataQuery instances
    @raise IOError if repomd.xml contains no primary location
    """
    path = primaryPath(directory)

    gunzippedPrimary = gzip.GzipFile(path)
    elementTree = ET.parse(gunzippedPrimary)
    root = elementTree.getroot()

    packageQueries = []
    for packageElement in root:
        packageQuery = RepoDataQuery(directory, packageElement)
        packageQueries.append(packageQuery)

    return packageQueries
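
queries() above resolves repodata/repomd.xml, follows the <data type="primary"> location to the gzipped primary file, parses it and wraps every package element. A hedged sketch of the same walk that merely prints package names; it assumes the standard createrepo namespaces and an on-disk layout like repo/repodata/...-primary.xml.gz (the path in the usage comment is illustrative):

    import gzip
    import os
    import xml.etree.ElementTree as ET

    REPO_NS = '{http://linux.duke.edu/metadata/repo}'
    COMMON_NS = '{http://linux.duke.edu/metadata/common}'

    def list_package_names(repo_dir):
        # locate the primary data file via repomd.xml
        repomd = ET.parse(os.path.join(repo_dir, 'repodata', 'repomd.xml')).getroot()
        for data in repomd:
            if data.get('type') == 'primary':
                href = data.find(REPO_NS + 'location').get('href')
                break
        else:
            raise IOError('no primary location found')

        # parse the gzipped primary.xml and pull out the package names
        with gzip.GzipFile(os.path.join(repo_dir, href)) as primary:
            root = ET.parse(primary).getroot()
        return [pkg.find(COMMON_NS + 'name').text for pkg in root]

    # e.g. list_package_names('/srv/mirror/repo') -> ['foo', 'bar', ...]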

class RepoDataQuery(object):
    """PackageQuery that reads in data from the repodata directory files."""

    def __init__(self, directory, element):
        """Creates a RepoDataQuery from the a package Element under a metadata
        Element in a primary.xml file.

        @param directory repository directory path. Used to convert relative
                         paths to full paths.
        @param element package Element
        """
        self.__directory = os.path.abspath(directory)
        self.__element = element

    def __formatElement(self):
        return self.__element.find(namespace("common") + "format")

    def __parseEntry(self, element):
        entry = element.get("name")
        flags = element.get("flags")

        if flags is not None:
            version = element.get("ver")
            operator = OPERATOR_BY_FLAGS[flags]
            entry += " %s %s" % (operator, version)

            release = element.get("rel")
            if release is not None:
                entry += "-%s" % release

        return entry

    def __parseEntryCollection(self, collection):
        formatElement = self.__formatElement()
        collectionElement = formatElement.find(namespace("rpm") + collection)

        entries = []
        if collectionElement is not None:
            for entryElement in collectionElement.findall(namespace("rpm") +
                                                          "entry"):
                entry = self.__parseEntry(entryElement)
                entries.append(entry)

        return entries

    def __versionElement(self):
        return self.__element.find(namespace("common") + "version")

    def arch(self):
        return self.__element.find(namespace("common") + "arch").text

    def description(self):
        return self.__element.find(namespace("common") + "description").text

    def distribution(self):
        return None

    def epoch(self):
        return self.__versionElement().get("epoch")

    def name(self):
        return self.__element.find(namespace("common") + "name").text

    def path(self):
        locationElement = self.__element.find(namespace("common") + "location")
        relativePath = locationElement.get("href")
        absolutePath = os.path.join(self.__directory, relativePath)

        return absolutePath

    def provides(self):
        return self.__parseEntryCollection("provides")

    def release(self):
        return self.__versionElement().get("rel")

    def requires(self):
        return self.__parseEntryCollection("requires")

    def vercmp(self, other):
        res = osc.util.rpmquery.RpmQuery.rpmvercmp(str(self.epoch()), str(other.epoch()))
        if res != 0:

@@ -156,6 +156,6 @@ class RepoDataQuery(object):
            return res
        res = osc.util.rpmquery.RpmQuery.rpmvercmp(self.release(), other.release())
        return res

    def version(self):
        return self.__versionElement().get("ver")
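
__parseEntry() above turns an rpm <entry> element (name, flags, ver, rel) into a dependency string such as "foo >= 1.2-3", using the module's OPERATOR_BY_FLAGS table. A small sketch of that composition; the flag-to-operator mapping spelled out here follows the usual createrepo flags and is an assumption, not a copy of the module's table:

    # assumed mapping of repodata flags to rpm dependency operators
    OPERATOR_BY_FLAGS = {"EQ": "=", "LE": "<=", "GE": ">=", "LT": "<", "GT": ">"}

    def entry_string(name, flags=None, ver=None, rel=None):
        # mirror __parseEntry(): "name [<op> ver[-rel]]"
        entry = name
        if flags is not None:
            entry += " %s %s" % (OPERATOR_BY_FLAGS[flags], ver)
            if rel is not None:
                entry += "-%s" % rel
        return entry

    print(entry_string("foo"))                    # foo
    print(entry_string("foo", "GE", "1.2", "3"))  # foo >= 1.2-3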

@@ -20,7 +20,7 @@ class RpmHeader:
    def append(self, entry):
        self.entries.append(entry)

-   def getTag(self, tag):
+   def gettag(self, tag):
        for i in self.entries:
            if i.tag == tag:
                return i

@@ -131,7 +131,7 @@ class RpmQuery(packagequery.PackageQuery):
            entry.data = entry.data[0]
            return
        # get private i18n table
-       table = self.header.getTag(100)
+       table = self.header.gettag(100)
        # just care about the country code
        lang = lang.split('_', 1)[0]
        cnt = 0

@@ -147,9 +147,9 @@ class RpmQuery(packagequery.PackageQuery):
            raise RpmHeaderError('unsupported tag type \'%d\' (tag: \'%s\'' % (entry.type, entry.tag))

    def __reqprov(self, tag, flags, version):
-       pnames = self.header.getTag(tag).data
-       pflags = self.header.getTag(flags).data
-       pvers = self.header.getTag(version).data
+       pnames = self.header.gettag(tag).data
+       pflags = self.header.gettag(flags).data
+       pvers = self.header.gettag(version).data
        if not (pnames and pflags and pvers):
            raise RpmError('cannot get provides/requires, tags are missing')
        res = []

@@ -179,46 +179,46 @@ class RpmQuery(packagequery.PackageQuery):

    # XXX: create dict for the tag => number mapping?!
    def name(self):
-       return self.header.getTag(1000).data
+       return self.header.gettag(1000).data

    def version(self):
-       return self.header.getTag(1001).data
+       return self.header.gettag(1001).data

    def release(self):
-       return self.header.getTag(1002).data
+       return self.header.gettag(1002).data

    def epoch(self):
-       epoch = self.header.getTag(1003)
+       epoch = self.header.gettag(1003)
        if epoch is None:
            return 0
        return epoch.data[0]

    def arch(self):
-       return self.header.getTag(1022).data
+       return self.header.gettag(1022).data

    def summary(self):
-       return self.header.getTag(1004).data
+       return self.header.gettag(1004).data

    def description(self):
-       return self.header.getTag(1005).data
+       return self.header.gettag(1005).data

    def url(self):
-       entry = self.header.getTag(1020)
+       entry = self.header.gettag(1020)
        if entry is None:
            return None
        return entry.data

    def path(self):
        return self.__path

    def provides(self):
        return self.__reqprov(1047, 1112, 1113)

    def requires(self):
        return self.__reqprov(1049, 1048, 1050)

-   def getTag(self, num):
-       return self.header.getTag(num)
+   def gettag(self, num):
+       return self.header.gettag(num)

    @staticmethod
    def query(filename):
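
The accessors above hard-code rpm header tag numbers (1000 name, 1001 version, 1002 release, 1003 epoch, 1004 summary, 1005 description, 1020 url, 1022 arch), and the XXX comment already asks for a tag-to-number dict. A hedged sketch of what such a mapping could look like; the helper and its name are hypothetical:

    # name -> rpm header tag number, as used by the accessors above
    RPMTAGS = {
        'name': 1000,
        'version': 1001,
        'release': 1002,
        'epoch': 1003,
        'summary': 1004,
        'description': 1005,
        'url': 1020,
        'arch': 1022,
    }

    def tag_number(name):
        # hypothetical helper: look the tag up by its symbolic name
        return RPMTAGS[name.lower()]

    print(tag_number('ARCH'))  # 1022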