
Merge branch 'master' of gitorious.org:opensuse/osc

Commit 430798536b by Juergen Weigert, 2010-06-29 10:49:48 +02:00
8 changed files with 296 additions and 154 deletions

NEWS
View File

@@ -1,3 +1,6 @@
+0.128
+- better default commands selection for editor/pager
+
 0.127
 - add size limit mode, files can be ignored on checkout or update given a certain size limit.
 - --csv/--format options for results command - using format user can explicitly specify what he wants print

View File

@ -317,15 +317,20 @@ class OscConfigParser(ConfigParser.SafeConfigParser):
# XXX: simplify! # XXX: simplify!
def __str__(self): def __str__(self):
ret = [] ret = []
first = True
for line in self._sections._lines: for line in self._sections._lines:
if line.type == 'section': if line.type == 'section':
if first:
first = False
else:
ret.append('')
ret.append('[%s]' % line.name) ret.append('[%s]' % line.name)
for sline in line._lines: for sline in line._lines:
if sline.name == '__name__': if sline.name == '__name__':
continue continue
if sline.type == 'option': if sline.type == 'option':
ret.append(sline.frmt % (sline.name, sline.value)) ret.append(sline.frmt % (sline.name, sline.value))
else: elif str(sline) != '':
ret.append(str(sline)) ret.append(str(sline))
else: else:
ret.append(str(line)) ret.append(str(line))
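With the blank-line separator and the skip of empty stray lines, a round-tripped oscrc would now serialize roughly like this (section names and option values below are only illustrative, not taken from this commit):

    [general]
    apiurl = https://api.opensuse.org

    [https://api.opensuse.org]
    user = tux
    pass = secret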

View File

@@ -165,10 +165,6 @@ def run(prg):
         print >>sys.stderr, e
         return 1
-    except OSError, e:
-        print >>sys.stderr, e
-        return 1
     except CpioError, e:
         print >>sys.stderr, e
         return 1

View File

@@ -8,10 +8,11 @@
 import os
 import re
 import sys
-from tempfile import NamedTemporaryFile
+from tempfile import NamedTemporaryFile, mkdtemp
 from shutil import rmtree
 from osc.fetch import *
 from osc.core import get_buildinfo, store_read_apiurl, store_read_project, store_read_package, meta_exists, quote_plus, get_buildconfig, is_package_dir
+from osc.core import get_binarylist, get_binary_file
 from osc.util import rpmquery, debquery
 import osc.conf
 import oscerr
@@ -29,6 +30,8 @@ change_personality = {
             'i386': 'linux32',
             'ppc': 'powerpc32',
             's390': 's390',
+            'sparc': 'linux32',
+            'sparcv8': 'linux32',
             }

 can_also_build = {
@@ -47,6 +50,7 @@ can_also_build = {
     'i586': [ 'i386', 'ppc', 'ppc64', 'armv4l', 'armv5el', 'armv6el', 'armv7el', 'armv8el', 'sh4', 'mips', 'mips64' ],
     'i686': [ 'i586', 'ppc', 'ppc64', 'armv4l', 'armv5el', 'armv6el', 'armv7el', 'armv8el', 'sh4', 'mips', 'mips64' ],
     'x86_64': ['i686', 'i586', 'i386', 'ppc', 'ppc64', 'armv4l', 'armv5el', 'armv6el', 'armv7el', 'armv8el', 'sh4', 'mips', 'mips64' ],
+    'sparc64': ['sparc64v', 'sparcv9v', 'sparcv9', 'sparcv8', 'sparc'],
     }

 # real arch of this machine
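The two tables extended above drive cross-architecture builds. Roughly, build.py consults them along these lines (a simplified sketch, not the literal call site; 'bi.buildarch' and 'buildcmd' stand in for variables used later in main()):

    if hostarch != bi.buildarch:
        if bi.buildarch not in can_also_build.get(hostarch, []):
            print >>sys.stderr, 'Error: cannot build %s on host arch %s.' % (bi.buildarch, hostarch)
            return 1
        if bi.buildarch in change_personality:
            # e.g. the new 'sparc'/'sparcv8' entries run the build under 'linux32'
            buildcmd = change_personality[bi.buildarch] + ' ' + buildcmd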
@@ -54,7 +58,6 @@ hostarch = os.uname()[4]
 if hostarch == 'i686': # FIXME
     hostarch = 'i586'

-
 class Buildinfo:
     """represent the contents of a buildinfo file"""
@@ -318,6 +321,29 @@ def create_deps(pkgqs):
     return depfile


+trustprompt = """Would you like to ...
+0 - quit (default)
+1 - trust packages from '%(project)s' always
+2 - trust them just this time
+? """
+def check_trusted_projects(apiurl, projects):
+    trusted = config['api_host_options'][apiurl]['trusted_prj']
+    tlen = len(trusted)
+    for prj in projects:
+        if not prj in trusted:
+            print "\nThe build root needs packages from project '%s'." % prj
+            print "Note that malicious packages can compromise the build result or even your system."
+            r = raw_input(trustprompt % { 'project':prj })
+            if r == '1':
+                trusted.append(prj)
+            elif r != '2':
+                print "Well, good good bye then :-)"
+                raise oscerr.UserAbort()
+
+    if tlen != len(trusted):
+        config['api_host_options'][apiurl]['trusted_prj'] = trusted
+        conf.config_set_option(apiurl, 'trusted_prj', ' '.join(trusted))
+
 def main(opts, argv):

     repo = argv[0]
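Answering "1" persists the project through conf.config_set_option(), and get_config() later reads it back as a space-separated list (see the conf.py hunk in this commit). In ~/.oscrc that ends up as a trusted_prj entry under the API host's section, roughly like this (project names are only examples):

    [https://api.opensuse.org]
    trusted_prj = openSUSE:Factory devel:tools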
@@ -325,6 +351,7 @@ def main(opts, argv):
     build_descr = argv[2]
     xp = []
     build_root = None
+    build_uid=''
     vm_type = config['build-type']

     build_descr = os.path.abspath(build_descr)
@@ -378,7 +405,9 @@ def main(opts, argv):
     if opts.without:
         for o in opts.without:
             buildargs.append('--without %s' % o)
-    build_uid=''
+    # FIXME: quoting
+    # if opts.define:
+    #    buildargs.append('--define "%s"' % opts.define)
     if config['build-uid']:
         build_uid = config['build-uid']
     if opts.build_uid:
@@ -392,9 +421,6 @@ def main(opts, argv):
         else:
             print >>sys.stderr, 'Error: build-uid arg must be 2 colon separated numerics: "uid:gid" or "caller"'
             return 1
-    # FIXME: quoting
-    # if opts.define:
-    #    buildargs.append('--define "%s"' % opts.define)
     if opts.vm_type:
         vm_type = opts.vm_type
     if opts.alternative_project:
@@ -568,8 +594,6 @@ def main(opts, argv):
     if bi.release:
         buildargs.append('--release %s' % bi.release)

-    buildargs = ' '.join(buildargs)
-
     # real arch of this machine
     # vs.
     # arch we are supposed to build for
@@ -619,9 +643,77 @@ def main(opts, argv):
                       enable_cpio = opts.cpio_bulk_download,
                       cookiejar=cookiejar)

+    # implicitly trust the project we are building for
+    check_trusted_projects(apiurl, [ i for i in bi.projects.keys() if not i == prj ])
+
     # now update the package cache
     fetcher.run(bi)

+    old_pkg_dir = None
+    if opts.oldpackages:
+        old_pkg_dir = opts.oldpackages
+        if not old_pkg_dir.startswith('/') and not opts.offline:
+            data = [ prj, pacname, repo, arch]
+            if old_pkg_dir == '_link':
+                p = osc.core.findpacs(os.curdir)[0]
+                if not p.islink():
+                    raise oscerr.WrongOptions('package is not a link')
+                data[0] = p.linkinfo.project
+                data[1] = p.linkinfo.package
+                repos = osc.core.get_repositories_of_project(apiurl, data[0])
+                # hack for links to e.g. Factory
+                if not data[2] in repos and 'standard' in repos:
+                    data[2] = 'standard'
+            elif old_pkg_dir != '' and old_pkg_dir != '_self':
+                a = old_pkg_dir.split('/')
+                for i in range(0, len(a)):
+                    data[i] = a[i]
+
+            destdir = os.path.join(config['packagecachedir'], data[0], data[2], data[3])
+            old_pkg_dir = None
+            try:
+                print "Downloading previous build from %s ..." % '/'.join(data)
+                binaries = get_binarylist(apiurl, data[0], data[2], data[3], package=data[1], verbose=True)
+            except Exception, e:
+                print "Error: failed to get binaries: %s" % str(e)
+                binaries = []
+
+            if binaries:
+                class mytmpdir:
+                    """ temporary directory that removes itself"""
+                    def __init__(self, *args, **kwargs):
+                        self.name = mkdtemp(*args, **kwargs)
+                    def cleanup(self):
+                        rmtree(self.name)
+                    def __del__(self):
+                        self.cleanup()
+                    def __exit__(self):
+                        self.cleanup()
+                    def __str__(self):
+                        return self.name
+
+                old_pkg_dir = mytmpdir(prefix='.build.oldpackages', dir=os.path.abspath(os.curdir))
+                if not os.path.exists(destdir):
+                    os.makedirs(destdir)
+            for i in binaries:
+                fname = os.path.join(destdir, i.name)
+                os.symlink(fname, os.path.join(str(old_pkg_dir), i.name))
+                if os.path.exists(fname):
+                    st = os.stat(fname)
+                    if st.st_mtime == i.mtime and st.st_size == i.size:
+                        continue
+                get_binary_file(apiurl,
+                                data[0],
+                                data[2], data[3],
+                                i.name,
+                                package = data[1],
+                                target_filename = fname,
+                                target_mtime = i.mtime,
+                                progress_meter = True)
+
+        if old_pkg_dir != None:
+            buildargs.append('--oldpackages %s' % old_pkg_dir)
+
     # Make packages from buildinfo available as repos for kiwi
     if build_type == 'kiwi':
         if not os.path.exists('repos'):
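Hypothetical invocations of the new option, matching the '--oldpackages DIR (special values: _self, _link)' help text added to commandline.py later in this commit:

    osc build openSUSE_Factory x86_64 pkg.spec --oldpackages /tmp/previous-build   # reuse a local directory as-is
    osc build openSUSE_Factory x86_64 pkg.spec --oldpackages _self                 # fetch this package's previous binaries
    osc build openSUSE_Factory x86_64 pkg.spec --oldpackages _link                 # same, but for the link target (e.g. a Factory link)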
@@ -654,27 +746,12 @@ def main(opts, argv):
                 os.symlink(sffn, tffn)

     if bi.pacsuffix == 'rpm':
-        if vm_type == "xen" or vm_type == "kvm" or vm_type == "lxc":
-            print 'Skipping verification of package signatures due to secure VM build'
-        elif opts.no_verify or opts.noinit or opts.offline:
+        if opts.no_verify:
             print 'Skipping verification of package signatures'
         else:
             print 'Verifying integrity of cached packages'
-            t = config['api_host_options'][apiurl]['trusted_prj']
-            for prj in bi.prjkeys:
-                if not prj in t:
-                    print "\nYou are trying to use packages from project '%s'." % prj
-                    print "Note that malicious packages can compromise your system when using chroot build enviroment."
-                    print "Use kvm or xen builds for a safe enviroment."
-                    # saving back to config file is complicated
-                    # r = raw_input("Would you like to trust '%s' (a)lways, (t)emorarily or (N)ever? " % prj)
-                    # if r == 'a':
-                    #    config['api_host_options'][apiurl]['trusted_prj'] += prj
-                    # elif r != 't':
-                    #    print "Well, good good bye then :-)"
-                    #    sys.exit(1)
             verify_pacs([ i.fullfilename for i in bi.deps ], bi.keys)
     elif bi.pacsuffix == 'deb':
         if vm_type == "xen" or vm_type == "kvm" or vm_type == "lxc":
             print 'Skipping verification of package signatures due to secure VM build'
@@ -751,7 +828,7 @@ def main(opts, argv):
             specialcmdopts,
             bi.buildarch,
             vm_options,
-            buildargs,
+            ' '.join(buildargs),
             build_descr)

     if need_root:
@@ -787,7 +864,6 @@ def main(opts, argv):
     if opts.keep_pkgs:
         for i in b_built.splitlines() + s_built.splitlines():
-            import shutil
             shutil.copy2(i, os.path.join(opts.keep_pkgs, os.path.basename(i)))

     if bi_file:

View File

@@ -499,6 +499,8 @@ class Osc(cmdln.Cmdln):
         cmd = args[0]
         del args[0]

+        apiurl = self.get_api_url()
+
         if cmd in ['pkg']:
             min_args, max_args = 0, 2
         elif cmd in ['pattern']:
@@ -563,24 +565,24 @@ class Osc(cmdln.Cmdln):
         # show
         if not opts.edit and not opts.file and not opts.delete and not opts.create and not opts.set:
             if cmd == 'prj':
-                sys.stdout.write(''.join(show_project_meta(conf.config['apiurl'], project)))
+                sys.stdout.write(''.join(show_project_meta(apiurl, project)))
             elif cmd == 'pkg':
-                sys.stdout.write(''.join(show_package_meta(conf.config['apiurl'], project, package)))
+                sys.stdout.write(''.join(show_package_meta(apiurl, project, package)))
             elif cmd == 'attribute':
-                sys.stdout.write(''.join(show_attribute_meta(conf.config['apiurl'], project, package, subpackage, opts.attribute, opts.attribute_defaults, opts.attribute_project)))
+                sys.stdout.write(''.join(show_attribute_meta(apiurl, project, package, subpackage, opts.attribute, opts.attribute_defaults, opts.attribute_project)))
             elif cmd == 'prjconf':
-                sys.stdout.write(''.join(show_project_conf(conf.config['apiurl'], project)))
+                sys.stdout.write(''.join(show_project_conf(apiurl, project)))
             elif cmd == 'user':
-                r = get_user_meta(conf.config['apiurl'], user)
+                r = get_user_meta(apiurl, user)
                 if r:
                     sys.stdout.write(''.join(r))
             elif cmd == 'pattern':
                 if pattern:
-                    r = show_pattern_meta(conf.config['apiurl'], project, pattern)
+                    r = show_pattern_meta(apiurl, project, pattern)
                     if r:
                         sys.stdout.write(''.join(r))
                 else:
-                    r = show_pattern_metalist(conf.config['apiurl'], project)
+                    r = show_pattern_metalist(apiurl, project)
                     if r:
                         sys.stdout.write('\n'.join(r) + '\n')
@@ -590,6 +592,7 @@ class Osc(cmdln.Cmdln):
                 edit_meta(metatype='prj',
                           edit=True,
                           path_args=quote_plus(project),
+                          apiurl=apiurl,
                           template_args=({
                                   'name': project,
                                   'user': conf.config['user']}))
@@ -597,6 +600,7 @@ class Osc(cmdln.Cmdln):
                 edit_meta(metatype='pkg',
                           edit=True,
                           path_args=(quote_plus(project), quote_plus(package)),
+                          apiurl=apiurl,
                           template_args=({
                                   'name': package,
                                   'user': conf.config['user']}))
@@ -604,16 +608,19 @@ class Osc(cmdln.Cmdln):
                 edit_meta(metatype='prjconf',
                           edit=True,
                           path_args=quote_plus(project),
+                          apiurl=apiurl,
                           template_args=None)
             elif cmd == 'user':
                 edit_meta(metatype='user',
                           edit=True,
                           path_args=(quote_plus(user)),
+                          apiurl=apiurl,
                           template_args=({'user': user}))
             elif cmd == 'pattern':
                 edit_meta(metatype='pattern',
                           edit=True,
                           path_args=(project, pattern),
+                          apiurl=apiurl,
                           template_args=None)

         # create attribute entry
@@ -627,7 +634,7 @@ class Osc(cmdln.Cmdln):
                 values += '<value>%s</value>' % i
             aname = opts.attribute.split(":")
             d = '<attributes><attribute namespace=\'%s\' name=\'%s\' >%s</attribute></attributes>' % (aname[0], aname[1], values)
-            url = makeurl(conf.config['apiurl'], attributepath)
+            url = makeurl(apiurl, attributepath)
             for data in streamfile(url, http_POST, data=d):
                 sys.stdout.write(data)
@@ -646,26 +653,31 @@ class Osc(cmdln.Cmdln):
                 edit_meta(metatype='prj',
                           data=f,
                           edit=opts.edit,
+                          apiurl=apiurl,
                           path_args=quote_plus(project))
             elif cmd == 'pkg':
                 edit_meta(metatype='pkg',
                           data=f,
                           edit=opts.edit,
+                          apiurl=apiurl,
                           path_args=(quote_plus(project), quote_plus(package)))
             elif cmd == 'prjconf':
                 edit_meta(metatype='prjconf',
                           data=f,
                           edit=opts.edit,
+                          apiurl=apiurl,
                           path_args=quote_plus(project))
             elif cmd == 'user':
                 edit_meta(metatype='user',
                           data=f,
                           edit=opts.edit,
+                          apiurl=apiurl,
                           path_args=(quote_plus(user)))
             elif cmd == 'pattern':
                 edit_meta(metatype='pattern',
                           data=f,
                           edit=opts.edit,
+                          apiurl=apiurl,
                           path_args=(project, pattern))
@@ -674,13 +686,13 @@ class Osc(cmdln.Cmdln):
             path = metatypes[cmd]['path']
             if cmd == 'pattern':
                 path = path % (project, pattern)
-            u = makeurl(conf.config['apiurl'], [path])
+            u = makeurl(apiurl, [path])
             http_DELETE(u)
         elif cmd == 'attribute':
             if not opts.attribute:
                 raise oscerr.WrongOptions('no attribute given to create')
             attributepath.append(opts.attribute)
-            u = makeurl(conf.config['apiurl'], attributepath)
+            u = makeurl(apiurl, attributepath)
             for data in streamfile(u, http_DELETE):
                 sys.stdout.write(data)
         else:
@@ -836,7 +848,7 @@ class Osc(cmdln.Cmdln):
             print "Requests created: ",
             for i in sr_ids:
                 print i,
-            sys.exit('Successfull finished')
+            sys.exit('Successfully finished')

         elif len(args) <= 2:
             # try using the working copy at hand
@@ -948,7 +960,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
         print 'created request id', result

-    def _actionparser(option, opt_str, value, parser):
+    def _actionparser(self, opt_str, value, parser):
         value = []
         if not hasattr(parser.values, 'actiondata'):
             setattr(parser.values, 'actiondata', [])
@@ -2477,8 +2489,9 @@ Please submit there instead, or use --nodevelproject to force direct submission.
                   help='place PACKAGE folder in the current directory' \
                        'instead of a PROJECT/PACKAGE directory')
     @cmdln.option('-s', '--source-service-files', action='store_true',
-                  help='server side generated files of source services' \
-                       'gets downloaded as well' )
+                  help='Use server side generated sources instead of local generation.' )
+    @cmdln.option('-S', '--server-side-source-service-files', action='store_true',
+                  help='Use server side generated sources instead of local generation.' )
     @cmdln.option('-l', '--limit-size', metavar='limit_size',
                   help='Skip all files with a given size')
     @cmdln.alias('co')
@@ -2511,10 +2524,6 @@ Please submit there instead, or use --nodevelproject to force direct submission.
             expand_link = False
         else:
             expand_link = True
-        if opts.source_service_files:
-            service_files = True
-        else:
-            service_files = False

         args = slash_split(args)
         project = package = filename = None
@@ -2550,7 +2559,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
             if opts.current_dir:
                 project_dir = None
             checkout_package(apiurl, project, package, rev, expand_link=expand_link, \
-                             prj_dir=project_dir, service_files=service_files, progress_obj=self.download_progress, limit_size=opts.limit_size, meta=opts.meta)
+                             prj_dir=project_dir, service_files = opts.source_service_files, server_service_files=opts.server_side_source_service_files, progress_obj=self.download_progress, limit_size=opts.limit_size, meta=opts.meta)
             print_request_list(apiurl, project, package)

         elif project:
@@ -2570,13 +2579,13 @@ Please submit there instead, or use --nodevelproject to force direct submission.
             for package in meta_get_packagelist(apiurl, project):
                 try:
                     checkout_package(apiurl, project, package, expand_link = expand_link, \
-                                     prj_dir = prj_dir, service_files = service_files, progress_obj=self.download_progress, limit_size=opts.limit_size, meta=opts.meta)
+                                     prj_dir = prj_dir, service_files = opts.source_service_files, server_service_files = opts.server_side_source_service_files, progress_obj=self.download_progress, limit_size=opts.limit_size, meta=opts.meta)
                 except oscerr.LinkExpandError, e:
                     print >>sys.stderr, 'Link cannot be expanded:\n', e
                     print >>sys.stderr, 'Use "osc repairlink" for fixing merge conflicts:\n'
                     # check out in unexpanded form at least
                     checkout_package(apiurl, project, package, expand_link = False, \
-                                     prj_dir = prj_dir, service_files = service_files, progress_obj=self.download_progress, limit_size=opts.limit_size, meta=opts.meta)
+                                     prj_dir = prj_dir, service_files = opts.source_service_files, server_service_files = opts.server_side_source_service_files, progress_obj=self.download_progress, limit_size=opts.limit_size, meta=opts.meta)
             print_request_list(apiurl, project)

         else:
@@ -2900,6 +2909,8 @@ Please submit there instead, or use --nodevelproject to force direct submission.
                   help='if a package is a link, update to the expanded sources')
     @cmdln.option('-s', '--source-service-files', action='store_true',
                   help='Use server side generated sources instead of local generation.' )
+    @cmdln.option('-S', '--server-side-source-service-files', action='store_true',
+                  help='Use server side generated sources instead of local generation.' )
     @cmdln.option('-l', '--limit-size', metavar='limit_size',
                   help='Skip all files with a given size')
     @cmdln.alias('up')
@@ -2935,9 +2946,6 @@ Please submit there instead, or use --nodevelproject to force direct submission.
             raise oscerr.WrongOptions('Sorry, the options --expand-link, --unexpand-link and '
                                       '--revision are mutually exclusive.')

-        if opts.source_service_files: service_files = True
-        else: service_files = False
-
         args = parseargs(args)
         arg_list = args[:]
@@ -2991,17 +2999,20 @@ Please submit there instead, or use --nodevelproject to force direct submission.
                         rev = p.show_upstream_xsrcmd5(linkrev="base")
                         p.mark_frozen()
                 else:
-                    p.update(rev, service_files, opts.limit_size)
+                    p.update(rev, opts.server_side_source_service_files, opts.limit_size)
                     rev = p.linkinfo.xsrcmd5
                 print 'Expanding to rev', rev
             elif opts.unexpand_link and p.islink() and p.isexpanded():
                 print 'Unexpanding to rev', p.linkinfo.lsrcmd5
-                p.update(rev, service_files, opts.limit_size)
+                p.update(rev, opts.server_side_source_service_files, opts.limit_size)
                 rev = p.linkinfo.lsrcmd5
             elif p.islink() and p.isexpanded():
                 rev = p.latest_rev()

-            p.update(rev, service_files, opts.limit_size)
+            p.update(rev, opts.server_side_source_service_files, opts.limit_size)
+
+            if opts.source_service_files:
+                print 'Running local source services'
+                p.run_source_services()
+
             if opts.unexpand_link:
                 p.unmark_frozen()
                 rev = None
@@ -3218,8 +3229,10 @@ Please submit there instead, or use --nodevelproject to force direct submission.
         wd = os.curdir
         if is_project_dir(wd):
             opts.csv = None
-            opts.arch = None
-            opts.repo = None
+            if opts.arch == []:
+                opts.arch = None
+            if opts.repo == []:
+                opts.repo = None
             opts.hide_legend = None
             opts.name_filter = None
             opts.status_filter = None
@@ -3891,6 +3904,8 @@ Please submit there instead, or use --nodevelproject to force direct submission.
                   help='enable downloading packages as cpio archive from api')
     @cmdln.option('--download-api-only', action='store_true',
                   help=SUPPRESS_HELP)
+    @cmdln.option('--oldpackages', metavar='DIR',
+                  help='take previous build from DIR (special values: _self, _link)')
     def do_build(self, subcmd, opts, *args):
         """${cmd_name}: Build a package on your local machine
@@ -4139,7 +4154,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
         Usage:
             osc log (inside working copy)
-            osc log remote_project remote_package
+            osc log remote_project [remote_package]

         ${cmd_option_list}
         """
@@ -4150,21 +4165,27 @@ Please submit there instead, or use --nodevelproject to force direct submission.
         if len(args) == 0:
             wd = os.curdir
-            project = store_read_project(wd)
-            package = store_read_package(wd)
-        elif len(args) < 2:
+            if is_project_dir(wd) or is_package_dir(wd):
+                project = store_read_project(wd)
+                if is_project_dir(wd):
+                    package = "_project"
+                else:
+                    package = store_read_package(wd)
+            else:
+                raise oscerr.NoWorkingCopy("Error: \"%s\" is not an osc working copy." % os.path.abspath(wd))
+        elif len(args) < 1:
             raise oscerr.WrongArgs('Too few arguments (required none or two)')
         elif len(args) > 2:
             raise oscerr.WrongArgs('Too many arguments (required none or two)')
+        elif len(args) == 1:
+            project = args[0]
+            package = "_project"
         else:
             project = args[0]
             package = args[1]

-        if opts.meta:
-            meta = 1
-
         rev, dummy = parseRevisionOption(opts.revision)
-        if rev and not checkRevision(project, package, rev, apiurl, meta):
+        if rev and not checkRevision(project, package, rev, apiurl, opts.meta):
             print >>sys.stderr, 'Revision \'%s\' does not exist' % rev
             sys.exit(1)
@@ -4174,7 +4195,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
         if opts.xml:
             format = 'xml'

-        log = '\n'.join(get_commitlog(apiurl, project, package, rev, format, meta))
+        log = '\n'.join(get_commitlog(apiurl, project, package, rev, format, opts.meta))
         run_pager(log)

     @cmdln.option('-f', '--failed', action='store_true',
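With the new argument handling above, all of these forms are accepted (the project name is just an example):

    osc log                        # inside a checked-out package or project working copy
    osc log openSUSE:Tools         # one argument: history of the project's _project pseudo-package
    osc log openSUSE:Tools osc     # two arguments: project and package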
@@ -4289,21 +4310,34 @@ Please submit there instead, or use --nodevelproject to force direct submission.
         otherwise all binary packages in the project will be deleted.

         usage:
+            osc wipebinaries OPTS                       # works in checked out project dir
             osc wipebinaries OPTS PROJECT [PACKAGE]
         ${cmd_option_list}
         """

         args = slash_split(args)

+        package = project = None
+        apiurl = self.get_api_url()
+
+        # try to get project and package from checked out dirs
         if len(args) < 1:
-            raise oscerr.WrongArgs('Missing <project> argument.')
+            if is_project_dir(os.getcwd()):
+                project = store_read_project(os.curdir)
+            if is_package_dir(os.getcwd()):
+                project = store_read_project(os.curdir)
+                package = store_read_package(os.curdir)
+            if project is None:
+                raise oscerr.WrongArgs('Missing <project> argument.')
         if len(args) > 2:
             raise oscerr.WrongArgs('Wrong number of arguments.')

+        # respect given project and package
+        if len(args) >= 1:
+            project = args[0]
         if len(args) == 2:
             package = args[1]
-        else:
-            package = None

         codes = []
         if opts.build_disabled:
@@ -4322,7 +4356,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
         # make a new request for each code= parameter
         for code in codes:
-            print wipebinaries(conf.config['apiurl'], args[0], package, opts.arch, opts.repo, code)
+            print wipebinaries(apiurl, project, package, opts.arch, opts.repo, code)

     @cmdln.option('-q', '--quiet', action='store_true',
@@ -4339,7 +4373,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
         others even when they are not "published" yet.

         usage:
-           osc getbinaries REPOSITORY # works in checked out package (check out all archs in subdirs)
+           osc getbinaries REPOSITORY # works in checked out package
            osc getbinaries REPOSITORY ARCHITECTURE # works in checked out package
            osc getbinaries PROJECT PACKAGE REPOSITORY ARCHITECTURE
         ${cmd_option_list}
@@ -4391,6 +4425,9 @@ Please submit there instead, or use --nodevelproject to force direct submission.
             os.makedirs(target_dir, 0755)

         for i in binaries:
+            # skip source rpms
+            if not opts.sources and i.name.endswith('.src.rpm'):
+                continue
             fname = '%s/%s' % (target_dir, i.name)
             if os.path.exists(fname):
                 st = os.stat(fname)

View File

@@ -101,7 +101,7 @@ DEFAULTS = { 'apiurl': 'https://api.opensuse.org',
              'checkout_no_colon': '0',
              # local files to ignore with status, addremove, ....
              # local files to ignore with status, addremove, ....
-             'exclude_glob': '.osc CVS .svn .* _linkerror *~ #*# *.orig *.bak',
+             'exclude_glob': '.osc CVS .svn .* _linkerror *~ #*# *.orig *.bak *.changes.*',
              # keep passwords in plaintext. If you see this comment, your osc
              # already uses the encrypted password, and only keeps them in plain text
              # for backwards compatibility. Default will change to 0 in future releases.
@@ -466,8 +466,22 @@ def config_set_option(section, opt, val=None, delete=False, update=True, **kwargs):
     cp = get_configParser(config['conffile'])
     # don't allow "internal" options
     general_opts = [i for i in DEFAULTS.keys() if not i in ['user', 'pass', 'passx']]
-    section = config['apiurl_aliases'].get(section, section)
-    sections = dict([[i.rstrip('/'), i] for i in cp.sections()])
+    if section != 'general':
+        section = config['apiurl_aliases'].get(section, section)
+        scheme, host = \
+            parse_apisrv_url(config.get('scheme', 'https'), section)
+        section = urljoin(scheme, host)
+
+    sections = {}
+    for url in cp.sections():
+        if url == 'general':
+            sections[url] = url
+        else:
+            scheme, host = \
+                parse_apisrv_url(config.get('scheme', 'https'), url)
+            apiurl = urljoin(scheme, host)
+            sections[apiurl] = url
+
     section = sections.get(section.rstrip('/'), section)
     if not section in cp.sections():
         raise oscerr.ConfigError('unknown section \'%s\'' % section, config['conffile'])
@@ -722,7 +736,7 @@ def get_config(override_conffile = None,
                 api_host_options[apiurl]['sslcertck'] = True

             if cp.has_option(url, 'trusted_prj'):
-                api_host_options[apiurl]['trusted_prj'] = cp.get(url, key).split(' ')
+                api_host_options[apiurl]['trusted_prj'] = cp.get(url, 'trusted_prj').split(' ')
             else:
                 api_host_options[apiurl]['trusted_prj'] = []
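A rough sketch of what the section normalization in config_set_option() buys; this is not part of the diff, it just mirrors the calls above and assumes parse_apisrv_url() and urljoin() are the module-level helpers of osc.conf and that get_config() has already populated the alias map:

    from osc import conf
    conf.get_config()
    for section in ('api.opensuse.org', 'https://api.opensuse.org/'):
        section = conf.config['apiurl_aliases'].get(section, section)
        scheme, host = conf.parse_apisrv_url(conf.config.get('scheme', 'https'), section)
        print conf.urljoin(scheme, host)    # both spellings resolve to https://api.opensuse.org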

View File

@@ -3,7 +3,7 @@
 # and distributed under the terms of the GNU General Public Licence,
 # either version 2, or version 3 (at your option).

-__version__ = '0.126git'
+__version__ = '0.127git'

 # __store_version__ is to be incremented when the format of the working copy
 # "store" changes in an incompatible way. Please add any needed migration
@@ -310,7 +310,7 @@ class Serviceinfo:
             name = call.split(None, 1)[0]
             if not os.path.exists("/usr/lib/obs/service/"+name):
                 msg = "ERROR: service is not installed!\n"
-                msg += "Maybe try this: zypper in obs-server-" + name
+                msg += "Maybe try this: zypper in obs-service-" + name
                 raise oscerr.APIError(msg)
             c = "/usr/lib/obs/service/" + call + " --outdir " + temp_dir
             if conf.config['verbose'] > 1:
@@ -755,12 +755,11 @@ class Project:

 class Package:
     """represent a package (its directory) and read/keep/write its metadata"""
-    def __init__(self, workingdir, progress_obj=None, limit_size=None, meta=None):
+    def __init__(self, workingdir, progress_obj=None, limit_size=None):
         self.dir = workingdir
         self.absdir = os.path.abspath(self.dir)
         self.storedir = os.path.join(self.absdir, store)
         self.progress_obj = progress_obj
-        self.meta = meta
         self.limit_size = limit_size
         if limit_size and limit_size == 0:
             self.limit_size = None
@@ -846,17 +845,15 @@ class Package:
         self.write_conflictlist()

-    # XXX: this isn't used at all
     def write_meta_mode(self):
+        # XXX: the "elif" is somehow a contradiction (with current and the old implementation
+        # it's not possible to "leave" the metamode again) (except if you modify pac.meta
+        # which is really ugly:) )
         if self.meta:
-            fname = os.path.join(self.storedir, '_meta_mode')
-            f = open(fname, 'w')
-            f.write(str("true"))
-            f.close()
-        else:
-            try:
-                os.unlink(os.path.join(self.storedir, '_meta_mode'))
-            except:
-                pass
+            store_write_string(self.absdir, '_meta_mode', '')
+        elif self.ismetamode():
+            os.unlink(os.path.join(self.storedir, '_meta_mode'))

     def write_sizelimit(self):
         if self.size_limit and self.size_limit <= 0:
@@ -1142,7 +1139,7 @@ class Package:
         self.in_conflict = read_inconflict(self.dir)
         self.linkrepair = os.path.isfile(os.path.join(self.storedir, '_linkrepair'))
         self.size_limit = read_sizelimit(self.dir)
-        self.meta = read_meta_mode(self.dir)
+        self.meta = self.ismetamode()

         # gather unversioned files, but ignore some stuff
         self.excluded = [ i for i in os.listdir(self.dir)
@@ -1175,6 +1172,10 @@ class Package:
         """tells us if the link is frozen."""
         return os.path.isfile(os.path.join(self.storedir, '_frozenlink'))

+    def ismetamode(self):
+        """tells us if the package is in meta mode"""
+        return os.path.isfile(os.path.join(self.storedir, '_meta_mode'))
+
     def get_pulled_srcmd5(self):
         pulledrev = None
         for line in open(os.path.join(self.storedir, '_pulled'), 'r'):
@@ -1959,17 +1960,6 @@ def read_tobedeleted(dir):

     return r

-def read_meta_mode(dir):
-    r = None
-    fname = os.path.join(dir, store, '_meta_mode')
-    if os.path.exists(fname):
-        r = open(fname).readline()
-    if r is None or not r == "true":
-        return None
-    return 1
-
 def read_sizelimit(dir):
     r = None
     fname = os.path.join(dir, store, '_size_limit')
@@ -2135,7 +2125,7 @@ def init_project_dir(apiurl, dir, project):
     if conf.config['do_package_tracking']:
         store_write_initial_packages(dir, project, [])

-def init_package_dir(apiurl, project, package, dir, revision=None, files=True, limit_size=None, meta=None):
+def init_package_dir(apiurl, project, package, dir, revision=None, files=True, limit_size=None, meta=False):
     if not os.path.isdir(store):
         os.mkdir(store)
     os.chdir(store)
@@ -2147,31 +2137,21 @@ def init_package_dir(apiurl, project, package, dir, revision=None, files=True, limit_size=None, meta=False):
         f.close()

     if meta:
-        f = open('_meta_mode', 'w')
-        f.write("true")
-        f.close()
+        store_write_string(os.pardir, '_meta_mode', '')

     if limit_size:
-        f = open('_size_limit', 'w')
-        f.write(str(limit_size))
-        f.close()
+        store_write_string(os.pardir, '_size_limit', str(limit_size))

     if files:
-        f = open('_files', 'w')
-        f.write(''.join(show_files_meta(apiurl, project, package, revision=revision, limit_size=limit_size, meta=meta)))
-        f.close()
+        fmeta = ''.join(show_files_meta(apiurl, project, package, revision=revision, limit_size=limit_size, meta=meta))
+        store_write_string(os.pardir, '_files', fmeta)
     else:
         # create dummy
         ET.ElementTree(element=ET.Element('directory')).write('_files')

-    f = open('_osclib_version', 'w')
-    f.write(__store_version__ + '\n')
-    f.close()
-
+    store_write_string(os.pardir, '_osclib_version', __store_version__ + '\n')
     store_write_apiurl(os.path.pardir, apiurl)
     os.chdir(os.pardir)

-    return
-
 def check_store_version(dir):
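store_write_string() itself is not part of this commit; presumably it is a small helper already present in core.py along these lines (a sketch only, the real implementation may differ, e.g. by writing atomically):

    def store_write_string(dir, file, string):
        # write a control file below the package's store directory
        # ('store' is the '.osc' constant used throughout core.py)
        fname = os.path.join(dir, store, file)
        f = open(fname, 'w')
        f.write(string)
        f.close()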
@@ -2278,13 +2258,13 @@ def show_package_trigger_reason(apiurl, prj, pac, repo, arch):
         raise

-def show_package_meta(apiurl, prj, pac, meta=None):
+def show_package_meta(apiurl, prj, pac, meta=False):
     query = {}
     if meta:
         query['meta'] = 1

-    # packages like _project, _pattern and _project do not have a _meta file
-    if pac.startswith('_'):
+    # packages like _pattern and _project do not have a _meta file
+    if pac.startswith('_pattern') or pac.startswith('_project'):
         return ""

     url = makeurl(apiurl, ['source', prj, pac, '_meta'], query)
@@ -2498,7 +2478,7 @@ def edit_meta(metatype,
         f.sync()

-def show_files_meta(apiurl, prj, pac, revision=None, expand=False, linkrev=None, linkrepair=False, limit_size=None, meta=None):
+def show_files_meta(apiurl, prj, pac, revision=None, expand=False, linkrev=None, linkrepair=False, limit_size=None, meta=False):
     query = {}
     if revision:
         query['rev'] = revision
@@ -2524,12 +2504,12 @@ def show_files_meta(apiurl, prj, pac, revision=None, expand=False, linkrev=None,
     return ET.tostring(root)

-def show_upstream_srcmd5(apiurl, prj, pac, expand=False, revision=None, meta=None):
+def show_upstream_srcmd5(apiurl, prj, pac, expand=False, revision=None, meta=False):
     m = show_files_meta(apiurl, prj, pac, expand=expand, revision=revision, meta=meta)
     return ET.fromstring(''.join(m)).get('srcmd5')

-def show_upstream_xsrcmd5(apiurl, prj, pac, revision=None, linkrev=None, linkrepair=False, meta=None):
+def show_upstream_xsrcmd5(apiurl, prj, pac, revision=None, linkrev=None, linkrepair=False, meta=False):
     m = show_files_meta(apiurl, prj, pac, revision=revision, linkrev=linkrev, linkrepair=linkrepair, meta=meta)
     try:
         # only source link packages have a <linkinfo> element.
@@ -2545,7 +2525,7 @@ def show_upstream_xsrcmd5(apiurl, prj, pac, revision=None, linkrev=None, linkrepair=False, meta=False):
     return li.xsrcmd5

-def show_upstream_rev(apiurl, prj, pac, meta=None):
+def show_upstream_rev(apiurl, prj, pac, meta=False):
     m = show_files_meta(apiurl, prj, pac, meta=meta)
     return ET.fromstring(''.join(m)).get('rev')
@@ -2605,6 +2585,38 @@ def read_meta_from_spec(specfile, *args):

     return spec_data

+def get_default_editor():
+    import platform
+    system = platform.system()
+    if system == 'Windows':
+        return 'notepad'
+    if system == 'Linux':
+        try:
+            # Python 2.6
+            dist = platform.linux_distribution()[0]
+        except AttributeError:
+            dist = platform.dist()[0]
+        if dist == 'debian':
+            return 'editor'
+        return 'vim'
+    return 'vi'
+
+def get_default_pager():
+    import platform
+    system = platform.system()
+    if system == 'Windows':
+        return 'less'
+    if system == 'Linux':
+        try:
+            # Python 2.6
+            dist = platform.linux_distribution()[0]
+        except AttributeError:
+            dist = platform.dist()[0]
+        if dist == 'debian':
+            return 'pager'
+        return 'less'
+    return 'more'
+
 def run_pager(message):
     import tempfile, sys
@@ -2614,15 +2626,12 @@ def run_pager(message):
     tmpfile = tempfile.NamedTemporaryFile()
     tmpfile.write(message)
     tmpfile.flush()
-    pager = os.getenv('PAGER', default='less')
+    pager = os.getenv('PAGER', default=get_default_pager())
     subprocess.call('%s %s' % (pager, tmpfile.name), shell=True)
     tmpfile.close()

 def run_editor(filename):
-    if sys.platform[:3] != 'win':
-        editor = os.getenv('EDITOR', default='vim')
-    else:
-        editor = os.getenv('EDITOR', default='notepad')
+    editor = os.getenv('EDITOR', default=get_default_editor())

     return subprocess.call([ editor, filename ])
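The environment still wins over these platform defaults, so overriding per invocation keeps working, for example (values are hypothetical):

    EDITOR=nano osc meta pkg -e openSUSE:Factory osc    # uses nano instead of vim/editor/notepad
    PAGER=most  osc log                                 # uses most instead of less/pager/more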
@@ -2928,7 +2937,7 @@ def download(url, filename, progress_obj = None, mtime = None):
     if mtime:
         os.utime(filename, (-1, mtime))

-def get_source_file(apiurl, prj, package, filename, targetfilename=None, revision=None, progress_obj=None, mtime=None, meta=None):
+def get_source_file(apiurl, prj, package, filename, targetfilename=None, revision=None, progress_obj=None, mtime=None, meta=False):
     targetfilename = targetfilename or filename
     query = {}
     if meta:
@@ -3145,6 +3154,7 @@ def make_diff(wc, revision):
                 diff.append(get_source_file_diff(os.path.dirname(tmpfile), os.path.basename(tmpfile),
                                                  revision, file, cmp_pac.storedir, file))
+                os.unlink(tmpfile)

     os.chdir(olddir)
     if cmp_pac != None:
         delete_dir(cmp_pac.absdir)
@@ -3155,7 +3165,7 @@ def make_diff(wc, revision):

 def server_diff(apiurl,
                 old_project, old_package, old_revision,
-                new_project, new_package, new_revision, unified=False, missingok=False, meta=None):
+                new_project, new_package, new_revision, unified=False, missingok=False, meta=False):
     query = {'cmd': 'diff', 'expand': '1'}
     if old_project:
         query['oproject'] = old_project
@@ -3222,7 +3232,7 @@ def make_dir(apiurl, project, package, pathname=None, prj_dir=None):

 def checkout_package(apiurl, project, package,
                      revision=None, pathname=None, prj_obj=None,
-                     expand_link=False, prj_dir=None, service_files=None, progress_obj=None, limit_size=None, meta=None):
+                     expand_link=False, prj_dir=None, server_service_files = None, service_files=None, progress_obj=None, limit_size=None, meta=False):
     try:
         # the project we're in might be deleted.
         # that'll throw an error then.
@@ -3268,7 +3278,7 @@ def checkout_package(apiurl, project, package,
     for filename in p.filenamelist:
         if filename in p.skipped:
             continue
-        if service_files or not filename.startswith('_service:'):
+        if server_service_files or not filename.startswith('_service:'):
             p.updatefile(filename, revision)
             # print 'A   ', os.path.join(project, package, filename)
             print statfrmt('A', os.path.join(pathname, filename))
@@ -3278,6 +3288,9 @@ def checkout_package(apiurl, project, package,
         prj_obj = Project(os.getcwd())
         prj_obj.set_state(p.name, ' ')
         prj_obj.write_packages()
+    if service_files:
+        print "Running local source services"
+        p.run_source_services()
     os.chdir(olddir)
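A hypothetical call mirroring the new keyword split (project and package names are only examples):

    checkout_package(apiurl, 'openSUSE:Tools', 'osc',
                     prj_dir='openSUSE:Tools',
                     server_service_files=True,    # '-S': also check out server-generated '_service:*' files
                     service_files=True)           # '-s': run local source services after the checkout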
@@ -3728,7 +3741,7 @@ def get_results(apiurl, prj, package, lastbuild=None, repository=[], arch=[]):
     r = []

     result_line_templ = '%(rep)-20s %(arch)-10s %(status)s'

-    for res in get_package_results(apiurl, prj, package, lastbuild=None, repository=[], arch=[]):
+    for res in get_package_results(apiurl, prj, package, lastbuild, repository, arch):
         res['status'] = res['code']
         if res['details'] != '':
             res['status'] += ': %s' % (res['details'], )
@@ -3752,16 +3765,17 @@ def get_prj_results(apiurl, prj, hide_legend=False, csv=False, status_filter=None,
     targets = []
     # {package: {(repo,arch): status}}
     status = {}
-    if not root.find('result'):
+    if root.find('result') == None:
         return []
-    for node in root.find('result'):
-        pacs.append(node.get('package'))
-    pacs.sort()
+    for results in root.findall('result'):
+        for node in results:
+            pacs.append(node.get('package'))
+    pacs = sorted(list(set(pacs)))

     for node in root.findall('result'):
         # filter architecture and repository
-        if arch != None and arch != node.get('arch'):
+        if arch != None and node.get('arch') not in arch:
             continue
-        if repo != None and repo != node.get('repository'):
+        if repo != None and node.get('repository') not in repo:
             continue
         if node.get('dirty') == "true":
             state = "outdated"
@@ -4041,7 +4055,7 @@ def print_jobhistory(apiurl, prj, current_package, repository, arch, format = 'text'):
             print '%s %-50s %-16s %-16s %-16s %-16s' % (endtime, package[0:49], reason[0:15], code[0:15], waitbuild, worker)

-def get_commitlog(apiurl, prj, package, revision, format = 'text', meta = None):
+def get_commitlog(apiurl, prj, package, revision, format = 'text', meta = False):
     import time, locale

     query = {}
@@ -4287,7 +4301,7 @@ def parseRevisionOption(string):
     else:
         return None, None

-def checkRevision(prj, pac, revision, apiurl=None):
+def checkRevision(prj, pac, revision, apiurl=None, meta=False):
     """
     check if revision is valid revision, i.e. it is not
     larger than the upstream revision id
@@ -4298,7 +4312,7 @@ def checkRevision(prj, pac, revision, apiurl=None, meta=False):
     if not apiurl:
         apiurl = conf.config['apiurl']
     try:
-        if int(revision) > int(show_upstream_rev(apiurl, prj, pac)) \
+        if int(revision) > int(show_upstream_rev(apiurl, prj, pac, meta)) \
            or int(revision) <= 0:
             return False
         else:
@@ -4461,9 +4475,7 @@ def unpack_srcrpm(srpm, dir, *files):
         print >>sys.stderr, 'error - \'%s\' is not a source rpm.' % srpm
         sys.exit(1)
     curdir = os.getcwd()
-    if not os.path.isdir(dir):
-        dir = curdir
-    else:
+    if os.path.isdir(dir):
         os.chdir(dir)
     cmd = 'rpm2cpio %s | cpio -i %s &> /dev/null' % (srpm, ' '.join(files))
     ret = subprocess.call(cmd, shell=True)
@@ -4928,8 +4940,7 @@ def request_interactive_review(apiurl, request):
                 request.actions[0].src_project, request.actions[0].src_package, request.actions[0].src_rev, True, False)
             tmpfile.write(diff)
             tmpfile.flush()
-            pager = os.getenv('EDITOR', default='less')
-            subprocess.call('%s %s' % (pager, tmpfile.name), shell=True)
+            run_editor(tmpfile.name)
         elif repl == 'c':
             print >>sys.stderr, 'Aborting'
             raise oscerr.UserAbort()
@@ -4977,7 +4988,7 @@ def get_user_projpkgs(apiurl, user, role=None, exclude_projects=[], proj=True, pkg=True):
     try:
         res = search(apiurl, **what)
     except urllib2.HTTPError, e:
-        if e.code != 400 or not role_filter:
+        if e.code != 400 or not role_filter_xpath:
             raise e
         # backward compatibility: local role filtering
         what = dict([[kind, role_filter_xpath] for kind in what.keys()])

View File

@@ -289,7 +289,7 @@ def verify_pacs_old(pac_list):
                 missing_key = line.split('#')[-1].split(')')[0]
                 print >>sys.stderr, """
-- If the key is missing, install it first.
+- If the key (%(name)s) is missing, install it first.
   For example, do the following:
     osc signkey PROJECT > file
   and, as root: