Mirror of https://github.com/openSUSE/osc.git
python3 compatibility: print function

The most visible change in Python 3 is the removal of the print statement, including all the crufty 'print >> sys.stderr, foo' forms. Adding 'from __future__ import print_function' makes the print function available from Python 2.6 onwards.
commit 7f2031558c
parent 0dcbddbafe
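In short, the conversion applied throughout this commit looks like the following minimal sketch (illustrative only, not code taken from the repository; the 'msg' variable is hypothetical). With the __future__ import in place the same source runs on Python 2.6/2.7 and on Python 3:

from __future__ import print_function  # enables print() on Python 2.6+, a no-op on Python 3
import sys

msg = 'example message'                 # hypothetical value, for illustration only

# Python 2 only:  print >>sys.stderr, 'killed!'
print('killed!', file=sys.stderr)       # stderr redirection via keyword argument instead of '>>'

# Python 2 only:  print 'BuildService API error:', msg
print('BuildService API error:', msg)   # multiple arguments work the same way

# Python 2 only:  print 'Transmitting file data ',   (trailing comma suppressed the newline)
print('Transmitting file data', end=' ')

# Python 2 only:  print                              (bare statement printed an empty line)
print()

The hunks below apply exactly this transformation across the osc modules.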
@@ -13,6 +13,7 @@
 # along with this program; if not, write to the Free Software
 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
 
+from __future__ import print_function
 
 try:
     import configparser
@@ -3,6 +3,8 @@
 # and distributed under the terms of the GNU General Public Licence,
 # either version 2, or (at your option) any later version.
 
+from __future__ import print_function
+
 import errno
 import os.path
 import pdb
@@ -67,42 +69,42 @@ def run(prg):
         if sys.stdout.isatty() and not hasattr(sys, 'ps1'):
             pdb.post_mortem(sys.exc_info()[2])
         else:
-            print >>sys.stderr, 'sys.stdout is not a tty. Not jumping into pdb.'
+            print('sys.stdout is not a tty. Not jumping into pdb.', file=sys.stderr)
         raise
     except oscerr.SignalInterrupt:
-        print >>sys.stderr, 'killed!'
+        print('killed!', file=sys.stderr)
         return 1
     except KeyboardInterrupt:
-        print >>sys.stderr, 'interrupted!'
+        print('interrupted!', file=sys.stderr)
         return 1
     except oscerr.UserAbort:
-        print >>sys.stderr, 'aborted.'
+        print('aborted.', file=sys.stderr)
         return 1
     except oscerr.APIError as e:
-        print >>sys.stderr, 'BuildService API error:', e.msg
+        print('BuildService API error:', e.msg, file=sys.stderr)
         return 1
     except oscerr.LinkExpandError as e:
-        print >>sys.stderr, 'Link "%s/%s" cannot be expanded:\n' % (e.prj, e.pac), e.msg
-        print >>sys.stderr, 'Use "osc repairlink" to fix merge conflicts.\n'
+        print('Link "%s/%s" cannot be expanded:\n' % (e.prj, e.pac), e.msg, file=sys.stderr)
+        print('Use "osc repairlink" to fix merge conflicts.\n', file=sys.stderr)
         return 1
     except oscerr.WorkingCopyWrongVersion as e:
-        print >>sys.stderr, e
+        print(e, file=sys.stderr)
         return 1
     except oscerr.NoWorkingCopy as e:
-        print >>sys.stderr, e
+        print(e, file=sys.stderr)
         if os.path.isdir('.git'):
-            print >>sys.stderr, "Current directory looks like git."
+            print("Current directory looks like git.", file=sys.stderr)
         if os.path.isdir('.hg'):
-            print >>sys.stderr, "Current directory looks like mercurial."
+            print("Current directory looks like mercurial.", file=sys.stderr)
         if os.path.isdir('.svn'):
-            print >>sys.stderr, "Current directory looks like svn."
+            print("Current directory looks like svn.", file=sys.stderr)
         if os.path.isdir('CVS'):
-            print >>sys.stderr, "Current directory looks like cvs."
+            print("Current directory looks like cvs.", file=sys.stderr)
         return 1
     except HTTPError as e:
-        print >>sys.stderr, 'Server returned an error:', e
+        print('Server returned an error:', e, file=sys.stderr)
         if hasattr(e, 'osc_msg'):
-            print >>sys.stderr, e.osc_msg
+            print(e.osc_msg, file=sys.stderr)
 
         try:
             body = e.read()
@@ -111,24 +113,24 @@ def run(prg):
 
         if getattr(prg.options, 'debug', None) or \
                 getattr(prg.conf, 'config', {}).get('debug', None):
-            print >>sys.stderr, e.hdrs
-            print >>sys.stderr, body
+            print(e.hdrs, file=sys.stderr)
+            print(body, file=sys.stderr)
 
         if e.code in [400, 403, 404, 500]:
             if '<summary>' in body:
                 msg = body.split('<summary>')[1]
                 msg = msg.split('</summary>')[0]
-                print >>sys.stderr, msg
+                print(msg, file=sys.stderr)
         return 1
     except BadStatusLine as e:
-        print >>sys.stderr, 'Server returned an invalid response:', e
-        print >>sys.stderr, e.line
+        print('Server returned an invalid response:', e, file=sys.stderr)
+        print(e.line, file=sys.stderr)
         return 1
     except HTTPException as e:
-        print >>sys.stderr, e
+        print(e, file=sys.stderr)
         return 1
     except URLError as e:
-        print >>sys.stderr, 'Failed to reach a server:\n', e.reason
+        print('Failed to reach a server:\n', e.reason, file=sys.stderr)
         return 1
     except IOError as e:
         # ignore broken pipe
@@ -138,59 +140,59 @@ def run(prg):
     except OSError as e:
         if e.errno != errno.ENOENT:
             raise
-        print >>sys.stderr, e
+        print(e, file=sys.stderr)
         return 1
     except (oscerr.ConfigError, oscerr.NoConfigfile) as e:
-        print >>sys.stderr, e.msg
+        print(e.msg, file=sys.stderr)
         return 1
     except oscerr.OscIOError as e:
-        print >>sys.stderr, e.msg
+        print(e.msg, file=sys.stderr)
         if getattr(prg.options, 'debug', None) or \
                 getattr(prg.conf, 'config', {}).get('debug', None):
-            print >>sys.stderr, e.e
+            print(e.e, file=sys.stderr)
         return 1
     except (oscerr.WrongOptions, oscerr.WrongArgs) as e:
-        print >>sys.stderr, e
+        print(e, file=sys.stderr)
         return 2
     except oscerr.ExtRuntimeError as e:
-        print >>sys.stderr, e.file + ':', e.msg
+        print(e.file + ':', e.msg, file=sys.stderr)
         return 1
     except oscerr.WorkingCopyOutdated as e:
-        print >>sys.stderr, e
+        print(e, file=sys.stderr)
         return 1
     except (oscerr.PackageExists, oscerr.PackageMissing, oscerr.WorkingCopyInconsistent) as e:
-        print >>sys.stderr, e.msg
+        print(e.msg, file=sys.stderr)
         return 1
     except oscerr.PackageInternalError as e:
-        print >>sys.stderr, 'a package internal error occured\n' \
+        print('a package internal error occured\n' \
             'please file a bug and attach your current package working copy ' \
-            'and the following traceback to it:'
-        print >>sys.stderr, e.msg
+            'and the following traceback to it:', file=sys.stderr)
+        print(e.msg, file=sys.stderr)
         traceback.print_exc(file=sys.stderr)
         return 1
     except oscerr.PackageError as e:
-        print >>sys.stderr, e.msg
+        print(e.msg, file=sys.stderr)
         return 1
     except PackageError as e:
-        print >>sys.stderr, '%s:' % e.fname, e.msg
+        print('%s:' % e.fname, e.msg, file=sys.stderr)
         return 1
     except RPMError as e:
-        print >>sys.stderr, e
+        print(e, file=sys.stderr)
         return 1
     except SSLError as e:
-        print >>sys.stderr, "SSL Error:", e
+        print("SSL Error:", e, file=sys.stderr)
         return 1
     except SSLVerificationError as e:
-        print >>sys.stderr, "Certificate Verification Error:", e
+        print("Certificate Verification Error:", e, file=sys.stderr)
         return 1
     except NoSecureSSLError as e:
-        print >>sys.stderr, e
+        print(e, file=sys.stderr)
         return 1
     except CpioError as e:
-        print >>sys.stderr, e
+        print(e, file=sys.stderr)
         return 1
     except oscerr.OscBaseError as e:
-        print >>sys.stderr, '*** Error:', e
+        print('*** Error:', e, file=sys.stderr)
         return 1
 
 # vim: sw=4 et
osc/build.py | 94
@@ -3,7 +3,7 @@
 # and distributed under the terms of the GNU General Public Licence,
 # either version 2, or (at your option) any later version.
 
-
+from __future__ import print_function
 
 import os
 import re
@@ -85,8 +85,8 @@ class Buildinfo:
         try:
             tree = ET.parse(filename)
         except:
-            print >>sys.stderr, 'could not parse the buildinfo:'
-            print >>sys.stderr, open(filename).read()
+            print('could not parse the buildinfo:', file=sys.stderr)
+            print(open(filename).read(), file=sys.stderr)
             sys.exit(1)
 
         root = tree.getroot()
@@ -270,7 +270,7 @@ def get_built_files(pacdir, pactype):
                             stdout=subprocess.PIPE).stdout.read().strip()
         s_built = []
     else:
-        print >>sys.stderr, 'WARNING: Unknown package type \'%s\'.' % pactype
+        print('WARNING: Unknown package type \'%s\'.' % pactype, file=sys.stderr)
         b_built = []
         s_built = []
     return s_built, b_built
@@ -366,14 +366,14 @@ def check_trusted_projects(apiurl, projects):
     tlen = len(trusted)
     for prj in projects:
         if not prj in trusted:
-            print "\nThe build root needs packages from project '%s'." % prj
-            print "Note that malicious packages can compromise the build result or even your system."
+            print("\nThe build root needs packages from project '%s'." % prj)
+            print("Note that malicious packages can compromise the build result or even your system.")
            r = raw_input(trustprompt % { 'project':prj })
            if r == '1':
-                print "adding '%s' to ~/.oscrc: ['%s']['trusted_prj']" % (prj,apiurl)
+                print("adding '%s' to ~/.oscrc: ['%s']['trusted_prj']" % (prj,apiurl))
                trusted.append(prj)
            elif r != '2':
-                print "Well, good good bye then :-)"
+                print("Well, good good bye then :-)")
                raise oscerr.UserAbort()
 
     if tlen != len(trusted):
@@ -460,7 +460,7 @@ def main(apiurl, opts, argv):
         elif buildidre.match(build_uid):
             buildargs.append('--uid=%s' % build_uid)
         else:
-            print >>sys.stderr, 'Error: build-uid arg must be 2 colon separated numerics: "uid:gid" or "caller"'
+            print('Error: build-uid arg must be 2 colon separated numerics: "uid:gid" or "caller"', file=sys.stderr)
             return 1
     if opts.vm_type:
         vm_type = opts.vm_type
@@ -483,7 +483,7 @@ def main(apiurl, opts, argv):
         if var.startswith('OSC_'): var = var[4:]
         var = var.lower().replace('_', '-')
         if var in config:
-            print 'Overriding config value for %s=\'%s\' with \'%s\'' % (var, config[var], val)
+            print('Overriding config value for %s=\'%s\' with \'%s\'' % (var, config[var], val))
         config[var] = val
 
     pacname = pac
@@ -535,7 +535,7 @@ def main(apiurl, opts, argv):
             build_descr_data = s + build_descr_data
 
     if opts.prefer_pkgs:
-        print 'Scanning the following dirs for local packages: %s' % ', '.join(opts.prefer_pkgs)
+        print('Scanning the following dirs for local packages: %s' % ', '.join(opts.prefer_pkgs))
         prefer_pkgs, cpio = get_prefer_pkgs(opts.prefer_pkgs, arch, build_type)
         cpio.add(os.path.basename(build_descr), build_descr_data)
         build_descr_data = cpio.get()
@@ -579,18 +579,18 @@ def main(apiurl, opts, argv):
     if opts.noinit:
         if not os.path.isfile(bi_filename):
             raise oscerr.WrongOptions('--noinit is not possible, no local buildinfo file')
-        print 'Use local \'%s\' file as buildinfo' % bi_filename
+        print('Use local \'%s\' file as buildinfo' % bi_filename)
         if not os.path.isfile(bc_filename):
             raise oscerr.WrongOptions('--noinit is not possible, no local buildconfig file')
-        print 'Use local \'%s\' file as buildconfig' % bc_filename
+        print('Use local \'%s\' file as buildconfig' % bc_filename)
     elif opts.offline:
         if not os.path.isfile(bi_filename):
             raise oscerr.WrongOptions('--offline is not possible, no local buildinfo file')
-        print 'Use local \'%s\' file as buildinfo' % bi_filename
+        print('Use local \'%s\' file as buildinfo' % bi_filename)
         if not os.path.isfile(bc_filename):
             raise oscerr.WrongOptions('--offline is not possible, no local buildconfig file')
     else:
-        print 'Getting buildinfo from server and store to %s' % bi_filename
+        print('Getting buildinfo from server and store to %s' % bi_filename)
         bi_text = ''.join(get_buildinfo(apiurl,
                                         prj,
                                         pac,
@@ -603,7 +603,7 @@ def main(apiurl, opts, argv):
         # maybe we should check for errors before saving the file
         bi_file.write(bi_text)
         bi_file.flush()
-        print 'Getting buildconfig from server and store to %s' % bc_filename
+        print('Getting buildconfig from server and store to %s' % bc_filename)
         bc = get_buildconfig(apiurl, prj, repo)
         if not bc_file:
             bc_file = open(bc_filename, 'w')
@@ -624,13 +624,13 @@ def main(apiurl, opts, argv):
                 pass
 
             if pkg_meta_e:
-                print >>sys.stderr, 'ERROR: Either wrong repo/arch as parameter or a parse error of .spec/.dsc/.kiwi file due to syntax error'
+                print('ERROR: Either wrong repo/arch as parameter or a parse error of .spec/.dsc/.kiwi file due to syntax error', file=sys.stderr)
             else:
-                print >>sys.stderr, 'The package \'%s\' does not exists - please ' \
-                    'rerun with \'--local-package\'' % pac
+                print('The package \'%s\' does not exists - please ' \
+                    'rerun with \'--local-package\'' % pac, file=sys.stderr)
         else:
-            print >>sys.stderr, 'The project \'%s\' does not exists - please ' \
-                'rerun with \'--alternative-project <alternative_project>\'' % prj
+            print('The project \'%s\' does not exists - please ' \
+                'rerun with \'--alternative-project <alternative_project>\'' % prj, file=sys.stderr)
         sys.exit(1)
     else:
         raise
@@ -651,19 +651,19 @@ def main(apiurl, opts, argv):
     # arch we are supposed to build for
     if bi.hostarch != None:
         if hostarch != bi.hostarch and not bi.hostarch in can_also_build.get(hostarch, []):
-            print >>sys.stderr, 'Error: hostarch \'%s\' is required.' % (bi.hostarch)
+            print('Error: hostarch \'%s\' is required.' % (bi.hostarch), file=sys.stderr)
             return 1
     elif hostarch != bi.buildarch:
         if not bi.buildarch in can_also_build.get(hostarch, []):
             # OBSOLETE: qemu_can_build should not be needed anymore since OBS 2.3
             if vm_type != "emulator" and not bi.buildarch in qemu_can_build:
-                print >>sys.stderr, 'Error: hostarch \'%s\' cannot build \'%s\'.' % (hostarch, bi.buildarch)
+                print('Error: hostarch \'%s\' cannot build \'%s\'.' % (hostarch, bi.buildarch), file=sys.stderr)
                 return 1
-            print >>sys.stderr, 'WARNING: It is guessed to build on hostarch \'%s\' for \'%s\' via QEMU.' % (hostarch, bi.buildarch)
+            print('WARNING: It is guessed to build on hostarch \'%s\' for \'%s\' via QEMU.' % (hostarch, bi.buildarch), file=sys.stderr)
 
     rpmlist_prefers = []
     if prefer_pkgs:
-        print 'Evaluating preferred packages'
+        print('Evaluating preferred packages')
         for name, path in prefer_pkgs.tems():
             if bi.has_dep(name):
                 # We remove a preferred package from the buildinfo, so that the
@@ -673,9 +673,9 @@ def main(apiurl, opts, argv):
                 # not verified.
                 bi.remove_dep(name)
                 rpmlist_prefers.append((name, path))
-                print ' - %s (%s)' % (name, path)
+                print(' - %s (%s)' % (name, path))
 
-    print 'Updating cache of required packages'
+    print('Updating cache of required packages')
 
     urllist = []
     if not opts.download_api_only:
@@ -730,10 +730,10 @@ def main(apiurl, opts, argv):
         destdir = os.path.join(cache_dir, data[0], data[2], data[3])
         old_pkg_dir = None
         try:
-            print "Downloading previous build from %s ..." % '/'.join(data)
+            print("Downloading previous build from %s ..." % '/'.join(data))
             binaries = get_binarylist(apiurl, data[0], data[2], data[3], package=data[1], verbose=True)
         except Exception as e:
-            print "Error: failed to get binaries: %s" % str(e)
+            print("Error: failed to get binaries: %s" % str(e))
             binaries = []
 
         if binaries:
@@ -802,7 +802,7 @@ def main(apiurl, opts, argv):
             if not os.path.exists(os.path.join(pradir)):
                 os.makedirs(os.path.join(pradir))
             if not os.path.exists(tffn):
-                print "Using package: "+sffn
+                print("Using package: "+sffn)
                 if opts.linksources:
                     os.link(sffn, tffn)
                 else:
@@ -810,7 +810,7 @@ def main(apiurl, opts, argv):
                 if prefer_pkgs:
                     for name, path in prefer_pkgs.items():
                         if name == filename:
-                            print "Using prefered package: " + path + "/" + filename
+                            print("Using prefered package: " + path + "/" + filename)
                            os.unlink(tffn)
                            if opts.linksources:
                                os.link(path + "/" + filename, tffn)
@@ -818,22 +818,22 @@ def main(apiurl, opts, argv):
                                os.symlink(path + "/" + filename, tffn)
 
     if vm_type == "xen" or vm_type == "kvm" or vm_type == "lxc":
-        print 'Skipping verification of package signatures due to secure VM build'
+        print('Skipping verification of package signatures due to secure VM build')
     elif bi.pacsuffix == 'rpm':
         if opts.no_verify:
-            print 'Skipping verification of package signatures'
+            print('Skipping verification of package signatures')
         else:
-            print 'Verifying integrity of cached packages'
+            print('Verifying integrity of cached packages')
             verify_pacs(bi)
     elif bi.pacsuffix == 'deb':
         if opts.no_verify or opts.noinit:
-            print 'Skipping verification of package signatures'
+            print('Skipping verification of package signatures')
         else:
-            print 'WARNING: deb packages get not verified, they can compromise your system !'
+            print('WARNING: deb packages get not verified, they can compromise your system !')
     else:
-        print 'WARNING: unknown packages get not verified, they can compromise your system !'
+        print('WARNING: unknown packages get not verified, they can compromise your system !')
 
-    print 'Writing build configuration'
+    print('Writing build configuration')
 
     rpmlist = [ '%s %s\n' % (i.name, i.fullfilename) for i in bi.deps if not i.noinstall ]
     rpmlist += [ '%s %s\n' % (i[0], i[1]) for i in rpmlist_prefers ]
@@ -888,10 +888,10 @@ def main(apiurl, opts, argv):
 
 
     if opts.preload:
-        print "Preload done for selected repo/arch."
+        print("Preload done for selected repo/arch.")
         sys.exit(0)
 
-    print 'Running build'
+    print('Running build')
     cmd = [ config['build-cmd'], '--root='+build_root,
             '--rpmlist='+rpmlist_filename,
             '--dist='+bc_filename,
@@ -915,11 +915,11 @@ def main(apiurl, opts, argv):
     try:
         rc = run_external(cmd[0], *cmd[1:])
         if rc:
-            print
-            print 'The buildroot was:', build_root
+            print()
+            print('The buildroot was:', build_root)
             sys.exit(rc)
     except KeyboardInterrupt as i:
-        print "keyboard interrupt, killing build ..."
+        print("keyboard interrupt, killing build ...")
         cmd.append('--kill')
         run_external(cmd[0], *cmd[1:])
         raise i
@@ -932,10 +932,10 @@ def main(apiurl, opts, argv):
     if os.path.exists(pacdir):
         (s_built, b_built) = get_built_files(pacdir, bi.pacsuffix)
 
-        print
-        if s_built: print s_built
-        print
-        print b_built
+        print()
+        if s_built: print(s_built)
+        print()
+        print(b_built)
 
         if opts.keep_pkgs:
             for i in b_built.splitlines() + s_built.splitlines():
@@ -1,3 +1,5 @@
+from __future__ import print_function
+
 from tempfile import mkdtemp
 import os
 from shutil import rmtree
@@ -29,7 +31,7 @@ class Checker:
         try:
             self.readkey(key)
         except KeyError as e:
-            print e
+            print(e)
 
         if not len(self.imported):
             raise KeyError('', "no key imported")
osc/cmdln.py | 16
@@ -3,6 +3,8 @@
 # Author: Trent Mick (TrentM@ActiveState.com)
 # Home: http://trentm.com/projects/cmdln/
 
+from __future__ import print_function
+
 """An improvement on Python's standard cmd.py module.
 
 As with cmd.py, this module provides "a simple framework for writing
@@ -1443,8 +1445,8 @@ def _dedentlines(lines, tabsize=8, skip_first_line=False):
     """
     DEBUG = False
     if DEBUG:
-        print "dedent: dedent(..., tabsize=%d, skip_first_line=%r)"\
-            % (tabsize, skip_first_line)
+        print("dedent: dedent(..., tabsize=%d, skip_first_line=%r)"\
+            % (tabsize, skip_first_line))
     indents = []
     margin = None
     for i, line in enumerate(lines):
@@ -1461,12 +1463,12 @@ def _dedentlines(lines, tabsize=8, skip_first_line=False):
                 break
         else:
             continue # skip all-whitespace lines
-        if DEBUG: print "dedent: indent=%d: %r" % (indent, line)
+        if DEBUG: print("dedent: indent=%d: %r" % (indent, line))
        if margin is None:
            margin = indent
        else:
            margin = min(margin, indent)
-    if DEBUG: print "dedent: margin=%r" % margin
+    if DEBUG: print("dedent: margin=%r" % margin)
 
     if margin is not None and margin > 0:
         for i, line in enumerate(lines):
@@ -1478,7 +1480,7 @@ def _dedentlines(lines, tabsize=8, skip_first_line=False):
             elif ch == '\t':
                 removed += tabsize - (removed % tabsize)
             elif ch in '\r\n':
-                if DEBUG: print "dedent: %r: EOL -> strip up to EOL" % line
+                if DEBUG: print("dedent: %r: EOL -> strip up to EOL" % line)
                 lines[i] = lines[i][j:]
                 break
             else:
@@ -1486,8 +1488,8 @@ def _dedentlines(lines, tabsize=8, skip_first_line=False):
                     "line %r while removing %d-space margin"
                     % (ch, line, margin))
             if DEBUG:
-                print "dedent: %r: %r -> removed %d/%d"\
-                    % (line, ch, removed, margin)
+                print("dedent: %r: %r -> removed %d/%d"\
+                    % (line, ch, removed, margin))
             if removed == margin:
                 lines[i] = lines[i][j+1:]
                 break
File diff suppressed because it is too large
osc/conf.py | 32
@@ -3,6 +3,8 @@
 # and distributed under the terms of the GNU General Public Licence,
 # either version 2, or version 3 (at your option).
 
+from __future__ import print_function
+
 """Read osc configuration and store it in a dictionary
 
 This module reads and parses ~/.oscrc. The resulting configuration is stored
@@ -414,8 +416,8 @@ def get_apiurl_usr(apiurl):
     try:
         return get_apiurl_api_host_options(apiurl)['user']
     except KeyError:
-        print >>sys.stderr, 'no specific section found in config file for host of [\'%s\'] - using default user: \'%s\'' \
-            % (apiurl, config['user'])
+        print('no specific section found in config file for host of [\'%s\'] - using default user: \'%s\'' \
+            % (apiurl, config['user']), file=sys.stderr)
         return config['user']
 
 
@@ -446,8 +448,8 @@ def _build_opener(url):
     authhandler_class = urllib2.HTTPBasicAuthHandler
     if sys.version_info >= (2, 6, 6) and sys.version_info < (2, 7, 1) \
         and not 'reset_retry_count' in dir(urllib2.HTTPBasicAuthHandler):
-        print >>sys.stderr, 'warning: your urllib2 version seems to be broken. ' \
-            'Using a workaround for http://bugs.python.org/issue9639'
+        print('warning: your urllib2 version seems to be broken. ' \
+            'Using a workaround for http://bugs.python.org/issue9639', file=sys.stderr)
 
         class OscHTTPBasicAuthHandler(urllib2.HTTPBasicAuthHandler):
             def http_error_401(self, *args):
@@ -491,7 +493,7 @@ def _build_opener(url):
             from . import oscssl
             from M2Crypto import m2urllib2
         except ImportError as e:
-            print e
+            print(e)
             raise NoSecureSSLError('M2Crypto is needed to access %s in a secure way.\nPlease install python-m2crypto.' % apiurl)
 
         cafile = options.get('cafile', None)
@@ -511,7 +513,7 @@ def _build_opener(url):
             raise Exception('No CA certificates found')
         opener = m2urllib2.build_opener(ctx, oscssl.myHTTPSHandler(ssl_context=ctx, appname='osc'), urllib2.HTTPCookieProcessor(cookiejar), authhandler, proxyhandler)
     else:
-        print >>sys.stderr, "WARNING: SSL certificate checks disabled. Connection is insecure!\n"
+        print("WARNING: SSL certificate checks disabled. Connection is insecure!\n", file=sys.stderr)
         opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookiejar), authhandler, proxyhandler)
     opener.addheaders = [('User-agent', 'osc/%s' % __version__)]
     _build_opener.last_opener = (apiurl, opener)
@@ -826,21 +828,21 @@ def get_config(override_conffile=None,
                     password = gk_data[0]['password']
                 else:
                     # this is most likely an error
-                    print >>sys.stderr, 'warning: no password found in keyring'
+                    print('warning: no password found in keyring', file=sys.stderr)
             except gnomekeyring.NoMatchError:
                 # Fallback to file based auth.
                 pass
 
            if not user is None and len(user) == 0:
                user = None
-                print >>sys.stderr, 'Warning: blank user in the keyring for the ' \
-                    'apiurl %s.\nPlease fix your keyring entry.'
+                print('Warning: blank user in the keyring for the ' \
+                    'apiurl %s.\nPlease fix your keyring entry.', file=sys.stderr)
 
            if user is not None and password is None:
                err = ('no password defined for "%s".\nPlease fix your keyring '
                       'entry or gnome-keyring setup.\nAssuming an empty password.'
                       % url)
-                print >>sys.stderr, err
+                print(err, file=sys.stderr)
                password = ''
 
         # Read credentials from config
@@ -863,12 +865,12 @@ def get_config(override_conffile=None,
         if config['plaintext_passwd'] and passwordx or not config['plaintext_passwd'] and password:
             if config['plaintext_passwd']:
                 if password != passwordx:
-                    print >>sys.stderr, '%s: rewriting from encoded pass to plain pass' % url
+                    print('%s: rewriting from encoded pass to plain pass' % url, file=sys.stderr)
                     add_section(conffile, url, user, passwordx)
                     password = passwordx
             else:
                 if password != passwordx:
-                    print >>sys.stderr, '%s: rewriting from plain pass to encoded pass' % url
+                    print('%s: rewriting from plain pass to encoded pass' % url, file=sys.stderr)
                     add_section(conffile, url, user, password)
 
         if not config['plaintext_passwd']:
@@ -925,10 +927,10 @@ def get_config(override_conffile=None,
         scheme = config.get('scheme', 'https')
         config['apiurl'] = urljoin(scheme, apisrv)
     if 'apisrc' in config or 'scheme' in config:
-        print >>sys.stderr, 'Warning: Use of the \'scheme\' or \'apisrv\' in ~/.oscrc is deprecated!\n' \
-            'Warning: See README for migration details.'
+        print('Warning: Use of the \'scheme\' or \'apisrv\' in ~/.oscrc is deprecated!\n' \
+            'Warning: See README for migration details.', file=sys.stderr)
     if 'build_platform' in config:
-        print >>sys.stderr, 'Warning: Use of \'build_platform\' config option is deprecated! (use \'build_repository\' instead)'
+        print('Warning: Use of \'build_platform\' config option is deprecated! (use \'build_repository\' instead)', file=sys.stderr)
         config['build_repository'] = config['build_platform']
 
     config['verbose'] = int(config['verbose'])
osc/core.py | 304
@ -3,6 +3,8 @@
|
|||||||
# and distributed under the terms of the GNU General Public Licence,
|
# and distributed under the terms of the GNU General Public Licence,
|
||||||
# either version 2, or version 3 (at your option).
|
# either version 2, or version 3 (at your option).
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
__version__ = '0.139git'
|
__version__ = '0.139git'
|
||||||
|
|
||||||
# __store_version__ is to be incremented when the format of the working copy
|
# __store_version__ is to be incremented when the format of the working copy
|
||||||
@ -373,11 +375,11 @@ class Serviceinfo:
|
|||||||
raise oscerr.PackageNotInstalled("obs-service-"+name)
|
raise oscerr.PackageNotInstalled("obs-service-"+name)
|
||||||
cmd = "/usr/lib/obs/service/" + call + " --outdir " + temp_dir
|
cmd = "/usr/lib/obs/service/" + call + " --outdir " + temp_dir
|
||||||
if conf.config['verbose'] > 1 or verbose:
|
if conf.config['verbose'] > 1 or verbose:
|
||||||
print "Run source service:", cmd
|
print("Run source service:", cmd)
|
||||||
r = run_external(cmd, shell=True)
|
r = run_external(cmd, shell=True)
|
||||||
|
|
||||||
if r != 0:
|
if r != 0:
|
||||||
print "Aborting: service call failed: " + c
|
print("Aborting: service call failed: " + c)
|
||||||
# FIXME: addDownloadUrlService calls si.execute after
|
# FIXME: addDownloadUrlService calls si.execute after
|
||||||
# updating _services.
|
# updating _services.
|
||||||
for filename in os.listdir(temp_dir):
|
for filename in os.listdir(temp_dir):
|
||||||
@ -614,7 +616,7 @@ class Project:
|
|||||||
msg = 'can\'t add package \'%s\': Object already exists' % pac
|
msg = 'can\'t add package \'%s\': Object already exists' % pac
|
||||||
raise oscerr.PackageExists(self.name, pac, msg)
|
raise oscerr.PackageExists(self.name, pac, msg)
|
||||||
else:
|
else:
|
||||||
print 'checking out new package %s' % pac
|
print('checking out new package %s' % pac)
|
||||||
checkout_package(self.apiurl, self.name, pac, \
|
checkout_package(self.apiurl, self.name, pac, \
|
||||||
pathname=getTransActPath(os.path.join(self.dir, pac)), \
|
pathname=getTransActPath(os.path.join(self.dir, pac)), \
|
||||||
prj_obj=self, prj_dir=self.dir, expand_link=expand_link, progress_obj=self.progress_obj)
|
prj_obj=self, prj_dir=self.dir, expand_link=expand_link, progress_obj=self.progress_obj)
|
||||||
@ -742,25 +744,25 @@ class Project:
|
|||||||
if pac.status(filename) != '?':
|
if pac.status(filename) != '?':
|
||||||
# this is not really necessary
|
# this is not really necessary
|
||||||
pac.put_on_deletelist(filename)
|
pac.put_on_deletelist(filename)
|
||||||
print statfrmt('D', getTransActPath(os.path.join(pac.dir, filename)))
|
print(statfrmt('D', getTransActPath(os.path.join(pac.dir, filename))))
|
||||||
print statfrmt('D', getTransActPath(os.path.join(pac.dir, os.pardir, pac.name)))
|
print(statfrmt('D', getTransActPath(os.path.join(pac.dir, os.pardir, pac.name))))
|
||||||
pac.write_deletelist()
|
pac.write_deletelist()
|
||||||
self.set_state(pac.name, 'D')
|
self.set_state(pac.name, 'D')
|
||||||
self.write_packages()
|
self.write_packages()
|
||||||
else:
|
else:
|
||||||
print 'package \'%s\' has local modifications (see osc st for details)' % pac.name
|
print('package \'%s\' has local modifications (see osc st for details)' % pac.name)
|
||||||
elif state == 'A':
|
elif state == 'A':
|
||||||
if force:
|
if force:
|
||||||
delete_dir(pac.absdir)
|
delete_dir(pac.absdir)
|
||||||
self.del_package_node(pac.name)
|
self.del_package_node(pac.name)
|
||||||
self.write_packages()
|
self.write_packages()
|
||||||
print statfrmt('D', pac.name)
|
print(statfrmt('D', pac.name))
|
||||||
else:
|
else:
|
||||||
print 'package \'%s\' has local modifications (see osc st for details)' % pac.name
|
print('package \'%s\' has local modifications (see osc st for details)' % pac.name)
|
||||||
elif state == None:
|
elif state == None:
|
||||||
print 'package is not under version control'
|
print('package is not under version control')
|
||||||
else:
|
else:
|
||||||
print 'unsupported state'
|
print('unsupported state')
|
||||||
|
|
||||||
def update(self, pacs = (), expand_link=False, unexpand_link=False, service_files=False):
|
def update(self, pacs = (), expand_link=False, unexpand_link=False, service_files=False):
|
||||||
if len(pacs):
|
if len(pacs):
|
||||||
@ -806,13 +808,13 @@ class Project:
|
|||||||
p.mark_frozen()
|
p.mark_frozen()
|
||||||
else:
|
else:
|
||||||
rev = p.linkinfo.xsrcmd5
|
rev = p.linkinfo.xsrcmd5
|
||||||
print 'Expanding to rev', rev
|
print('Expanding to rev', rev)
|
||||||
elif unexpand_link and p.islink() and p.isexpanded():
|
elif unexpand_link and p.islink() and p.isexpanded():
|
||||||
rev = p.linkinfo.lsrcmd5
|
rev = p.linkinfo.lsrcmd5
|
||||||
print 'Unexpanding to rev', rev
|
print('Unexpanding to rev', rev)
|
||||||
elif p.islink() and p.isexpanded():
|
elif p.islink() and p.isexpanded():
|
||||||
rev = p.latest_rev()
|
rev = p.latest_rev()
|
||||||
print 'Updating %s' % p.name
|
print('Updating %s' % p.name)
|
||||||
p.update(rev, service_files)
|
p.update(rev, service_files)
|
||||||
if unexpand_link:
|
if unexpand_link:
|
||||||
p.unmark_frozen()
|
p.unmark_frozen()
|
||||||
@ -832,7 +834,7 @@ class Project:
|
|||||||
# do nothing
|
# do nothing
|
||||||
pass
|
pass
|
||||||
else:
|
else:
|
||||||
print 'unexpected state.. package \'%s\'' % pac
|
print('unexpected state.. package \'%s\'' % pac)
|
||||||
|
|
||||||
self.checkout_missing_pacs(expand_link=not unexpand_link)
|
self.checkout_missing_pacs(expand_link=not unexpand_link)
|
||||||
finally:
|
finally:
|
||||||
@ -859,9 +861,9 @@ class Project:
|
|||||||
p.todo = todo
|
p.todo = todo
|
||||||
p.commit(msg, verbose=verbose, skip_local_service_run=skip_local_service_run)
|
p.commit(msg, verbose=verbose, skip_local_service_run=skip_local_service_run)
|
||||||
elif pac in self.pacs_unvers and not is_package_dir(os.path.join(self.dir, pac)):
|
elif pac in self.pacs_unvers and not is_package_dir(os.path.join(self.dir, pac)):
|
||||||
print 'osc: \'%s\' is not under version control' % pac
|
print('osc: \'%s\' is not under version control' % pac)
|
||||||
elif pac in self.pacs_broken:
|
elif pac in self.pacs_broken:
|
||||||
print 'osc: \'%s\' package not found' % pac
|
print('osc: \'%s\' package not found' % pac)
|
||||||
elif state == None:
|
elif state == None:
|
||||||
self.commitExtPackage(pac, msg, todo, verbose=verbose, skip_local_service_run=skip_local_service_run)
|
self.commitExtPackage(pac, msg, todo, verbose=verbose, skip_local_service_run=skip_local_service_run)
|
||||||
finally:
|
finally:
|
||||||
@ -888,7 +890,7 @@ class Project:
|
|||||||
def commitNewPackage(self, pac, msg = '', files = [], verbose = False, skip_local_service_run = False):
|
def commitNewPackage(self, pac, msg = '', files = [], verbose = False, skip_local_service_run = False):
|
||||||
"""creates and commits a new package if it does not exist on the server"""
|
"""creates and commits a new package if it does not exist on the server"""
|
||||||
if pac in self.pacs_available:
|
if pac in self.pacs_available:
|
||||||
print 'package \'%s\' already exists' % pac
|
print('package \'%s\' already exists' % pac)
|
||||||
else:
|
else:
|
||||||
user = conf.get_apiurl_usr(self.apiurl)
|
user = conf.get_apiurl_usr(self.apiurl)
|
||||||
edit_meta(metatype='pkg',
|
edit_meta(metatype='pkg',
|
||||||
@ -905,7 +907,7 @@ class Project:
|
|||||||
else:
|
else:
|
||||||
p = Package(os.path.join(self.dir, pac))
|
p = Package(os.path.join(self.dir, pac))
|
||||||
p.todo = files
|
p.todo = files
|
||||||
print statfrmt('Sending', os.path.normpath(p.dir))
|
print(statfrmt('Sending', os.path.normpath(p.dir)))
|
||||||
p.commit(msg=msg, verbose=verbose, skip_local_service_run=skip_local_service_run)
|
p.commit(msg=msg, verbose=verbose, skip_local_service_run=skip_local_service_run)
|
||||||
self.set_state(pac, ' ')
|
self.set_state(pac, ' ')
|
||||||
os.chdir(olddir)
|
os.chdir(olddir)
|
||||||
@ -928,7 +930,7 @@ class Project:
|
|||||||
except OSError:
|
except OSError:
|
||||||
pac_dir = os.path.join(self.dir, pac)
|
pac_dir = os.path.join(self.dir, pac)
|
||||||
#print statfrmt('Deleting', getTransActPath(os.path.join(self.dir, pac)))
|
#print statfrmt('Deleting', getTransActPath(os.path.join(self.dir, pac)))
|
||||||
print statfrmt('Deleting', getTransActPath(pac_dir))
|
print(statfrmt('Deleting', getTransActPath(pac_dir)))
|
||||||
delete_package(self.apiurl, self.name, pac)
|
delete_package(self.apiurl, self.name, pac)
|
||||||
self.del_package_node(pac)
|
self.del_package_node(pac)
|
||||||
|
|
||||||
@ -1102,7 +1104,7 @@ class Package:
|
|||||||
pathname = n
|
pathname = n
|
||||||
self.to_be_added.append(n)
|
self.to_be_added.append(n)
|
||||||
self.write_addlist()
|
self.write_addlist()
|
||||||
print statfrmt('A', pathname)
|
print(statfrmt('A', pathname))
|
||||||
|
|
||||||
def delete_file(self, n, force=False):
|
def delete_file(self, n, force=False):
|
||||||
"""deletes a file if possible and marks the file as deleted"""
|
"""deletes a file if possible and marks the file as deleted"""
|
||||||
@ -1302,16 +1304,16 @@ class Package:
|
|||||||
continue
|
continue
|
||||||
st = self.status(filename)
|
st = self.status(filename)
|
||||||
if st == 'C':
|
if st == 'C':
|
||||||
print 'Please resolve all conflicts before committing using "osc resolved FILE"!'
|
print('Please resolve all conflicts before committing using "osc resolved FILE"!')
|
||||||
return 1
|
return 1
|
||||||
elif filename in self.todo:
|
elif filename in self.todo:
|
||||||
if st in ('A', 'R', 'M'):
|
if st in ('A', 'R', 'M'):
|
||||||
todo_send[filename] = dgst(os.path.join(self.absdir, filename))
|
todo_send[filename] = dgst(os.path.join(self.absdir, filename))
|
||||||
real_send.append(filename)
|
real_send.append(filename)
|
||||||
print statfrmt('Sending', os.path.join(pathn, filename))
|
print(statfrmt('Sending', os.path.join(pathn, filename)))
|
||||||
elif st in (' ', '!', 'S'):
|
elif st in (' ', '!', 'S'):
|
||||||
if st == '!' and filename in self.to_be_added:
|
if st == '!' and filename in self.to_be_added:
|
||||||
print 'file \'%s\' is marked as \'A\' but does not exist' % filename
|
print('file \'%s\' is marked as \'A\' but does not exist' % filename)
|
||||||
return 1
|
return 1
|
||||||
f = self.findfilebyname(filename)
|
f = self.findfilebyname(filename)
|
||||||
if f is None:
|
if f is None:
|
||||||
@ -1321,7 +1323,7 @@ class Package:
|
|||||||
todo_send[filename] = f.md5
|
todo_send[filename] = f.md5
|
||||||
elif st == 'D':
|
elif st == 'D':
|
||||||
todo_delete.append(filename)
|
todo_delete.append(filename)
|
||||||
print statfrmt('Deleting', os.path.join(pathn, filename))
|
print(statfrmt('Deleting', os.path.join(pathn, filename)))
|
||||||
elif st in ('R', 'M', 'D', ' ', '!', 'S'):
|
elif st in ('R', 'M', 'D', ' ', '!', 'S'):
|
||||||
# ignore missing new file (it's not part of the current commit)
|
# ignore missing new file (it's not part of the current commit)
|
||||||
if st == '!' and filename in self.to_be_added:
|
if st == '!' and filename in self.to_be_added:
|
||||||
@ -1334,10 +1336,10 @@ class Package:
|
|||||||
todo_send[filename] = f.md5
|
todo_send[filename] = f.md5
|
||||||
|
|
||||||
if not real_send and not todo_delete and not self.islinkrepair() and not self.ispulled():
|
if not real_send and not todo_delete and not self.islinkrepair() and not self.ispulled():
|
||||||
print 'nothing to do for package %s' % self.name
|
print('nothing to do for package %s' % self.name)
|
||||||
return 1
|
return 1
|
||||||
|
|
||||||
print 'Transmitting file data ',
|
print('Transmitting file data', end=' ')
|
||||||
filelist = self.__generate_commitlist(todo_send)
|
filelist = self.__generate_commitlist(todo_send)
|
||||||
sfilelist = self.__send_commitlog(msg, filelist)
|
sfilelist = self.__send_commitlog(msg, filelist)
|
||||||
send = self.__get_todo_send(sfilelist)
|
send = self.__get_todo_send(sfilelist)
|
||||||
@ -1363,8 +1365,8 @@ class Package:
|
|||||||
self.put_source_file(filename, copy_only=True)
|
self.put_source_file(filename, copy_only=True)
|
||||||
|
|
||||||
self.rev = sfilelist.get('rev')
|
self.rev = sfilelist.get('rev')
|
||||||
print
|
print()
|
||||||
print 'Committed revision %s.' % self.rev
|
print('Committed revision %s.' % self.rev)
|
||||||
|
|
||||||
if self.ispulled():
|
if self.ispulled():
|
||||||
os.unlink(os.path.join(self.storedir, '_pulled'))
|
os.unlink(os.path.join(self.storedir, '_pulled'))
|
||||||
@ -1372,7 +1374,7 @@ class Package:
|
|||||||
os.unlink(os.path.join(self.storedir, '_linkrepair'))
|
os.unlink(os.path.join(self.storedir, '_linkrepair'))
|
||||||
self.linkrepair = False
|
self.linkrepair = False
|
||||||
# XXX: mark package as invalid?
|
# XXX: mark package as invalid?
|
||||||
print 'The source link has been repaired. This directory can now be removed.'
|
print('The source link has been repaired. This directory can now be removed.')
|
||||||
|
|
||||||
if self.islink() and self.isexpanded():
|
if self.islink() and self.isexpanded():
|
||||||
li = Linkinfo()
|
li = Linkinfo()
|
||||||
@ -1396,7 +1398,7 @@ class Package:
|
|||||||
# FIXME: add testcases for this codepath
|
# FIXME: add testcases for this codepath
|
||||||
sinfo = sfilelist.find('serviceinfo')
|
sinfo = sfilelist.find('serviceinfo')
|
||||||
if sinfo is not None:
|
if sinfo is not None:
|
||||||
print 'Waiting for server side source service run'
|
print('Waiting for server side source service run')
|
||||||
u = makeurl(self.apiurl, ['source', self.prjname, self.name])
|
u = makeurl(self.apiurl, ['source', self.prjname, self.name])
|
||||||
while sinfo is not None and sinfo.get('code') == 'running':
|
while sinfo is not None and sinfo.get('code') == 'running':
|
||||||
sys.stdout.write('.')
|
sys.stdout.write('.')
|
||||||
@ -1405,7 +1407,7 @@ class Package:
|
|||||||
sfilelist = ET.fromstring(http_GET(u).read())
|
sfilelist = ET.fromstring(http_GET(u).read())
|
||||||
# if sinfo is None another commit might have occured in the "meantime"
|
# if sinfo is None another commit might have occured in the "meantime"
|
||||||
sinfo = sfilelist.find('serviceinfo')
|
sinfo = sfilelist.find('serviceinfo')
|
||||||
print ''
|
print('')
|
||||||
rev=self.latest_rev()
|
rev=self.latest_rev()
|
||||||
self.update(rev=rev)
|
self.update(rev=rev)
|
||||||
|
|
||||||
@ -1846,14 +1848,14 @@ rev: %s
|
|||||||
if len(speclist) == 1:
|
if len(speclist) == 1:
|
||||||
specfile = speclist[0]
|
specfile = speclist[0]
|
||||||
elif len(speclist) > 1:
|
elif len(speclist) > 1:
|
||||||
print 'the following specfiles were found:'
|
print('the following specfiles were found:')
|
||||||
for filename in speclist:
|
for filename in speclist:
|
||||||
print filename
|
print(filename)
|
||||||
print 'please specify one with --specfile'
|
print('please specify one with --specfile')
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
else:
|
else:
|
||||||
print 'no specfile was found - please specify one ' \
|
print('no specfile was found - please specify one ' \
|
||||||
'with --specfile'
|
'with --specfile')
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
data = read_meta_from_spec(specfile, 'Summary', 'Url', '%description')
|
data = read_meta_from_spec(specfile, 'Summary', 'Url', '%description')
|
||||||
@@ -1882,11 +1884,11 @@ rev: %s
 mf = metafile(u, ET.tostring(root))

 if not force:
-print '*' * 36, 'old', '*' * 36
+print('*' * 36, 'old', '*' * 36)
-print m
+print(m)
-print '*' * 36, 'new', '*' * 36
+print('*' * 36, 'new', '*' * 36)
-print ET.tostring(root)
+print(ET.tostring(root))
-print '*' * 72
+print('*' * 72)
 repl = raw_input('Write? (y/N/e) ')
 else:
 repl = 'y'
@@ -1900,11 +1902,11 @@ rev: %s

 def mark_frozen(self):
 store_write_string(self.absdir, '_frozenlink', '')
-print
+print()
-print "The link in this package is currently broken. Checking"
+print("The link in this package is currently broken. Checking")
-print "out the last working version instead; please use 'osc pull'"
+print("out the last working version instead; please use 'osc pull'")
-print "to merge the conflicts."
+print("to merge the conflicts.")
-print
+print()

 def unmark_frozen(self):
 if os.path.exists(os.path.join(self.storedir, '_frozenlink')):
@@ -1976,7 +1978,7 @@ rev: %s
 if not size_limit is None:
 self.size_limit = int(size_limit)
 if os.path.isfile(os.path.join(self.storedir, '_in_update', '_files')):
-print 'resuming broken update...'
+print('resuming broken update...')
 root = ET.parse(os.path.join(self.storedir, '_in_update', '_files')).getroot()
 rfiles = self.__get_files(root)
 kept, added, deleted, services = self.__get_rev_changes(rfiles)
@@ -2002,9 +2004,9 @@ rev: %s
 os.close(fd)
 os.rename(wcfile, tmpfile)
 os.rename(origfile, wcfile)
-print 'warning: it seems you modified \'%s\' after the broken ' \
+print('warning: it seems you modified \'%s\' after the broken ' \
 'update. Restored original file and saved modified version ' \
-'to \'%s\'.' % (wcfile, tmpfile)
+'to \'%s\'.' % (wcfile, tmpfile))
 elif not os.path.isfile(wcfile):
 # this is strange... because it existed before the update. restore it
 os.rename(origfile, wcfile)
@@ -2053,7 +2055,7 @@ rev: %s
 # ok, the update can't fail due to existing files
 for f in added:
 self.updatefile(f.name, rev, f.mtime)
-print statfrmt('A', os.path.join(pathn, f.name))
+print(statfrmt('A', os.path.join(pathn, f.name)))
 for f in deleted:
 # if the storefile doesn't exist we're resuming an aborted update:
 # the file was already deleted but we cannot know this
@@ -2062,7 +2064,7 @@ rev: %s
 # if self.status(f.name) != 'M':
 self.delete_localfile(f.name)
 self.delete_storefile(f.name)
-print statfrmt('D', os.path.join(pathn, f.name))
+print(statfrmt('D', os.path.join(pathn, f.name)))
 if f.name in self.to_be_deleted:
 self.to_be_deleted.remove(f.name)
 self.write_deletelist()
@@ -2076,21 +2078,21 @@ rev: %s
 elif state == 'M':
 # try to merge changes
 merge_status = self.mergefile(f.name, rev, f.mtime)
-print statfrmt(merge_status, os.path.join(pathn, f.name))
+print(statfrmt(merge_status, os.path.join(pathn, f.name)))
 elif state == '!':
 self.updatefile(f.name, rev, f.mtime)
-print 'Restored \'%s\'' % os.path.join(pathn, f.name)
+print('Restored \'%s\'' % os.path.join(pathn, f.name))
 elif state == 'C':
 get_source_file(self.apiurl, self.prjname, self.name, f.name,
 targetfilename=os.path.join(self.storedir, f.name), revision=rev,
 progress_obj=self.progress_obj, mtime=f.mtime, meta=self.meta)
-print 'skipping \'%s\' (this is due to conflicts)' % f.name
+print('skipping \'%s\' (this is due to conflicts)' % f.name)
 elif state == 'D' and self.findfilebyname(f.name).md5 != f.md5:
 # XXX: in the worst case we might end up with f.name being
 # in _to_be_deleted and in _in_conflict... this needs to be checked
 if os.path.exists(os.path.join(self.absdir, f.name)):
 merge_status = self.mergefile(f.name, rev, f.mtime)
-print statfrmt(merge_status, os.path.join(pathn, f.name))
+print(statfrmt(merge_status, os.path.join(pathn, f.name)))
 if merge_status == 'C':
 # state changes from delete to conflict
 self.to_be_deleted.remove(f.name)
@@ -2099,23 +2101,23 @@ rev: %s
 # XXX: we cannot recover this case because we've no file
 # to backup
 self.updatefile(f.name, rev, f.mtime)
-print statfrmt('U', os.path.join(pathn, f.name))
+print(statfrmt('U', os.path.join(pathn, f.name)))
 elif state == ' ' and self.findfilebyname(f.name).md5 != f.md5:
 self.updatefile(f.name, rev, f.mtime)
-print statfrmt('U', os.path.join(pathn, f.name))
+print(statfrmt('U', os.path.join(pathn, f.name)))

 # checkout service files
 for f in services:
 get_source_file(self.apiurl, self.prjname, self.name, f.name,
 targetfilename=os.path.join(self.absdir, f.name), revision=rev,
 progress_obj=self.progress_obj, mtime=f.mtime, meta=self.meta)
-print statfrmt('A', os.path.join(pathn, f.name))
+print(statfrmt('A', os.path.join(pathn, f.name)))
 store_write_string(self.absdir, '_files', fm + '\n')
 if not self.meta:
 self.update_local_pacmeta()
 self.update_datastructs()

-print 'At revision %s.' % self.rev
+print('At revision %s.' % self.rev)

 def run_source_services(self, mode=None, singleservice=None, verbose=None):
 if self.name.startswith("_"):
@@ -2835,7 +2837,7 @@ def makeurl(baseurl, l, query=[]):
 """

 if conf.config['verbose'] > 1:
-print 'makeurl:', baseurl, l, query
+print('makeurl:', baseurl, l, query)

 if isinstance(query, type(list())):
 query = '&'.join(query)
@@ -2853,7 +2855,7 @@ def http_request(method, url, headers={}, data=None, file=None, timeout=100):
 filefd = None

 if conf.config['http_debug']:
-print >>sys.stderr, '\n\n--', method, url
+print('\n\n--', method, url, file=sys.stderr)

 if method == 'POST' and not file and not data:
 # adding data to an urllib2 request transforms it into a POST
@@ -2877,7 +2879,7 @@ def http_request(method, url, headers={}, data=None, file=None, timeout=100):

 if isinstance(headers, type({})):
 for i in headers.keys():
-print headers[i]
+print(headers[i])
 req.add_header(i, headers[i])

 if file and not data:
@@ -2903,7 +2905,7 @@ def http_request(method, url, headers={}, data=None, file=None, timeout=100):
 else:
 raise

-if conf.config['debug']: print >>sys.stderr, method, url
+if conf.config['debug']: print(method, url, file=sys.stderr)

 old_timeout = socket.getdefaulttimeout()
 # XXX: dirty hack as timeout doesn't work with python-m2crypto
@@ -3138,16 +3140,16 @@ class metafile:

 def sync(self):
 if self.change_is_required and self.hash_orig == dgst(self.filename):
-print 'File unchanged. Not saving.'
+print('File unchanged. Not saving.')
 os.unlink(self.filename)
 return

-print 'Sending meta data...'
+print('Sending meta data...')
 # don't do any exception handling... it's up to the caller what to do in case
 # of an exception
 http_PUT(self.url, file=self.filename)
 os.unlink(self.filename)
-print 'Done.'
+print('Done.')

 def edit(self):
 try:
@@ -3161,12 +3163,12 @@ class metafile:
 if e.headers.get('X-Opensuse-Errorcode'):
 error_help = "%s (%d)" % (e.headers.get('X-Opensuse-Errorcode'), e.code)

-print >>sys.stderr, 'BuildService API error:', error_help
+print('BuildService API error:', error_help, file=sys.stderr)
 # examine the error - we can't raise an exception because we might want
 # to try again
 data = e.read()
 if '<summary>' in data:
-print >>sys.stderr, data.split('<summary>')[1].split('</summary>')[0]
+print(data.split('<summary>')[1].split('</summary>')[0], file=sys.stderr)
 ri = raw_input('Try again? ([y/N]): ')
 if ri not in ['y', 'Y']:
 break
@@ -3175,7 +3177,7 @@ class metafile:

 def discard(self):
 if os.path.exists(self.filename):
-print 'discarding %s' % self.filename
+print('discarding %s' % self.filename)
 os.unlink(self.filename)

@@ -3439,7 +3441,7 @@ def run_pager(message, tmp_suffix=''):
 return

 if not sys.stdout.isatty():
-print message
+print(message)
 else:
 tmpfile = tempfile.NamedTemporaryFile(suffix=tmp_suffix)
 tmpfile.write(message)
@@ -3603,9 +3605,9 @@ def create_submit_request(apiurl,
 r = root.get('id')
 except urllib2.HTTPError as e:
 if e.headers.get('X-Opensuse-Errorcode') == "submit_request_rejected":
-print "WARNING:"
+print("WARNING:")
-print "WARNING: Project does not accept submit request, request to open a NEW maintenance incident instead"
+print("WARNING: Project does not accept submit request, request to open a NEW maintenance incident instead")
-print "WARNING:"
+print("WARNING:")
 xpath = 'maintenance/maintains/@project = \'%s\'' % dst_project
 res = search(apiurl, project_id=xpath)
 root = res['project_id']
@@ -3680,7 +3682,7 @@ def change_request_state_template(req, newstate):
 try:
 return tmpl % data
 except KeyError as e:
-print >>sys.stderr, 'error: cannot interpolate \'%s\' in \'%s\'' % (e.args[0], tmpl_name)
+print('error: cannot interpolate \'%s\' in \'%s\'' % (e.args[0], tmpl_name), file=sys.stderr)
 return ''

 def get_review_list(apiurl, project='', package='', byuser='', bygroup='', byproject='', bypackage='', states=('new')):
@@ -3730,7 +3732,7 @@ def get_review_list(apiurl, project='', package='', byuser='', bygroup='', bypro
 xpath = xpath_join(xpath, xpath_base % {'kind': kind, 'val': val}, op='and', nexpr_parentheses=True)

 if conf.config['verbose'] > 1:
-print '[ %s ]' % xpath
+print('[ %s ]' % xpath)
 res = search(apiurl, request=xpath)
 collection = res['request']
 requests = []
@@ -3760,7 +3762,7 @@ def get_exact_request_list(apiurl, src_project, dst_project, src_package=None, d
 xpath += " and action/@type=\'%s\'" % req_type

 if conf.config['verbose'] > 1:
-print '[ %s ]' % xpath
+print('[ %s ]' % xpath)

 res = search(apiurl, request=xpath)
 collection = res['request']
@@ -3802,7 +3804,7 @@ def get_request_list(apiurl, project='', package='', req_who='', req_state=('new
 'submit/target/@project=\'%(prj)s\'))' % {'prj': i}, op='and')

 if conf.config['verbose'] > 1:
-print '[ %s ]' % xpath
+print('[ %s ]' % xpath)
 res = search(apiurl, request=xpath)
 collection = res['request']
 requests = []
@@ -3874,11 +3876,11 @@ def check_existing_requests(apiurl, src_project, src_package, dst_project,
 req_state=['new','review', 'declined'])
 repl = ''
 if reqs:
-print 'There are already the following submit request: %s.' % \
+print('There are already the following submit request: %s.' % \
-', '.join([i.reqid for i in reqs])
+', '.join([i.reqid for i in reqs]))
 repl = raw_input('Supersede the old requests? (y/n/c) ')
 if repl.lower() == 'c':
-print >>sys.stderr, 'Aborting'
+print('Aborting', file=sys.stderr)
 raise oscerr.UserAbort()
 return repl == 'y', reqs

@@ -3888,7 +3890,7 @@ def get_group(apiurl, group):
 f = http_GET(u)
 return ''.join(f.readlines())
 except urllib2.HTTPError:
-print 'user \'%s\' not found' % group
+print('user \'%s\' not found' % group)
 return None

 def get_user_meta(apiurl, user):
@@ -3897,7 +3899,7 @@ def get_user_meta(apiurl, user):
 f = http_GET(u)
 return ''.join(f.readlines())
 except urllib2.HTTPError:
-print 'user \'%s\' not found' % user
+print('user \'%s\' not found' % user)
 return None


@@ -3916,7 +3918,7 @@ def get_user_data(apiurl, user, *tags):
 data.append('-')
 except AttributeError:
 # this part is reached if the tags tuple contains an invalid tag
-print 'The xml file for user \'%s\' seems to be broken' % user
+print('The xml file for user \'%s\' seems to be broken' % user)
 return []
 return data

@@ -4200,7 +4202,7 @@ def make_dir(apiurl, project, package, pathname=None, prj_dir=None, package_trac
 if not is_project_dir(prj_dir):
 # this directory could exist as a parent direory for one of our earlier
 # checked out sub-projects. in this case, we still need to initialize it.
-print statfrmt('A', prj_dir)
+print(statfrmt('A', prj_dir))
 Project.init_project(apiurl, prj_dir, project, package_tracking)

 if is_project_dir(os.path.join(prj_dir, package)):
@@ -4211,7 +4213,7 @@ def make_dir(apiurl, project, package, pathname=None, prj_dir=None, package_trac
 pathname = pkg_path

 if not os.path.exists(pkg_path):
-print statfrmt('A', pathname)
+print(statfrmt('A', pathname))
 os.mkdir(os.path.join(pkg_path))
 # os.mkdir(os.path.join(prj_dir, package, store))

@@ -4241,7 +4243,7 @@ def checkout_package(apiurl, project, package,
 if conf.config['checkout_rooted']:
 if prj_dir[:1] == '/':
 if conf.config['verbose'] > 1:
-print "checkout_rooted ignored for %s" % prj_dir
+print("checkout_rooted ignored for %s" % prj_dir)
 # ?? should we complain if not is_project_dir(prj_dir) ??
 else:
 # if we are inside a project or package dir, ascend to parent
@@ -4269,7 +4271,7 @@ def checkout_package(apiurl, project, package,

 if root_dots != '.':
 if conf.config['verbose']:
-print "found root of %s at %s" % (oldproj, root_dots)
+print("found root of %s at %s" % (oldproj, root_dots))
 prj_dir = root_dots + prj_dir

 if not pathname:
@@ -4305,7 +4307,7 @@ def checkout_package(apiurl, project, package,
 prj_obj.write_packages()
 p.update(revision, server_service_files, size_limit)
 if service_files:
-print 'Running all source services local'
+print('Running all source services local')
 p.run_source_services()

 def replace_pkg_meta(pkgmeta, new_name, new_prj, keep_maintainers = False,
@@ -4383,10 +4385,10 @@ def link_pac(src_project, src_package, dst_project, dst_package, force, rev='',
 # but first, make sure not to overwrite an existing one
 if '_link' in meta_get_filelist(apiurl, dst_project, dst_package):
 if force:
-print >>sys.stderr, 'forced overwrite of existing _link file'
+print('forced overwrite of existing _link file', file=sys.stderr)
 else:
-print >>sys.stderr
+print(file=sys.stderr)
-print >>sys.stderr, '_link file already exists...! Aborting'
+print('_link file already exists...! Aborting', file=sys.stderr)
 sys.exit(1)

 if rev:
@@ -4408,7 +4410,7 @@ def link_pac(src_project, src_package, dst_project, dst_package, force, rev='',
 else:
 cicount = ''

-print 'Creating _link...',
+print('Creating _link...', end=' ')

 project = ''
 if src_project != dst_project:
@@ -4428,7 +4430,7 @@ def link_pac(src_project, src_package, dst_project, dst_package, force, rev='',

 u = makeurl(apiurl, ['source', dst_project, dst_package, '_link'])
 http_PUT(u, data=link_template)
-print 'Done.'
+print('Done.')

 def aggregate_pac(src_project, src_package, dst_project, dst_package, repo_map = {}, disable_publish = False, nosources = False):
 """
@@ -4474,11 +4476,11 @@ def aggregate_pac(src_project, src_package, dst_project, dst_package, repo_map =
 # create the _aggregate file
 # but first, make sure not to overwrite an existing one
 if '_aggregate' in meta_get_filelist(apiurl, dst_project, dst_package):
-print >>sys.stderr
+print(file=sys.stderr)
-print >>sys.stderr, '_aggregate file already exists...! Aborting'
+print('_aggregate file already exists...! Aborting', file=sys.stderr)
 sys.exit(1)

-print 'Creating _aggregate...',
+print('Creating _aggregate...', end=' ')
 aggregate_template = """\
 <aggregatelist>
 <aggregate project="%s">
@@ -4504,7 +4506,7 @@ def aggregate_pac(src_project, src_package, dst_project, dst_package, repo_map =

 u = makeurl(apiurl, ['source', dst_project, dst_package, '_aggregate'])
 http_PUT(u, data=aggregate_template)
-print 'Done.'
+print('Done.')


 def attribute_branch_pkg(apiurl, attribute, maintained_update_project_attribute, package, targetproject, return_existing=False, force=False, noaccess=False, add_repositories=False, dryrun=False, nodevelproject=False, maintenance=False):
@@ -4549,7 +4551,7 @@ def attribute_branch_pkg(apiurl, attribute, maintained_update_project_attribute,
 return root
 # TODO: change api here and return parsed XML as class
 if conf.config['http_debug']:
-print >> sys.stderr, ET.tostring(root)
+print(ET.tostring(root), file=sys.stderr)
 for node in root.findall('data'):
 r = node.get('name')
 if r and r == 'targetproject':
@@ -4602,7 +4604,7 @@ def branch_pkg(apiurl, src_project, src_package, nodevelproject=False, rev=None,
 return (True, m.group(1), m.group(2), None, None)

 if conf.config['http_debug']:
-print >> sys.stderr, ET.tostring(root)
+print(ET.tostring(root), file=sys.stderr)
 data = {}
 for i in ET.fromstring(f.read()).findall('data'):
 data[i.get('name')] = i.text
@@ -4642,11 +4644,11 @@ def copy_pac(src_apiurl, src_project, src_package,
 except urllib2.HTTPError as e:
 pass
 if force_meta_update or not found:
-print 'Sending meta data...'
+print('Sending meta data...')
 u = makeurl(dst_apiurl, ['source', dst_project, dst_package, '_meta'])
 http_PUT(u, data=src_meta)

-print 'Copying files...'
+print('Copying files...')
 if not client_side_copy:
 query = {'cmd': 'copy', 'oproject': src_project, 'opackage': src_package }
 if expand or keep_link:
@@ -4669,7 +4671,7 @@ def copy_pac(src_apiurl, src_project, src_package,
 for n in meta_get_filelist(src_apiurl, src_project, src_package, expand=expand, revision=revision):
 if n.startswith('_service:') or n.startswith('_service_'):
 continue
-print ' ', n
+print(' ', n)
 tmpfile = None
 try:
 (fd, tmpfile) = tempfile.mkstemp(prefix='osc-copypac')
@@ -4953,7 +4955,7 @@ def get_results(apiurl, prj, package, lastbuild=None, repository=[], arch=[], ve
 r.append(result_line_templ % res)

 if printJoin:
-print printJoin.join(r)
+print(printJoin.join(r))

 if wait==False or waiting==False:
 break
@@ -5076,8 +5078,8 @@ def get_prj_results(apiurl, prj, hide_legend=False, csv=False, status_filter=Non
 try:
 st = buildstatus_symbols[status[pac][tg]]
 except:
-print 'osc: warn: unknown status \'%s\'...' % status[pac][tg]
+print('osc: warn: unknown status \'%s\'...' % status[pac][tg])
-print 'please edit osc/core.py, and extend the buildstatus_symbols dictionary.'
+print('please edit osc/core.py, and extend the buildstatus_symbols dictionary.')
 st = '?'
 buildstatus_symbols[status[pac][tg]] = '?'
 line.append(st)
@@ -5105,8 +5107,8 @@ def get_prj_results(apiurl, prj, hide_legend=False, csv=False, status_filter=Non
 try:
 st = buildstatus_symbols[status[pac][tg]]
 except:
-print 'osc: warn: unknown status \'%s\'...' % status[pac][tg]
+print('osc: warn: unknown status \'%s\'...' % status[pac][tg])
-print 'please edit osc/core.py, and extend the buildstatus_symbols dictionary.'
+print('please edit osc/core.py, and extend the buildstatus_symbols dictionary.')
 st = '?'
 buildstatus_symbols[status[pac][tg]] = '?'
 line.append(st)
@@ -5157,7 +5159,7 @@ def streamfile(url, http_meth = http_GET, bufsize=8192, data=None, progress_obj=
 raise oscerr.OscIOError(None, 'Content-Length is empty for %s, protocol violation' % url)
 retries = retries + 1
 if retries > 1 and conf.config['http_debug']:
-print >>sys.stderr, '\n\nRetry %d --' % (retries - 1), url
+print('\n\nRetry %d --' % (retries - 1), url, file=sys.stderr)
 f = http_meth.__call__(url, data = data)
 cl = f.info().get('Content-Length')

@@ -5316,7 +5318,7 @@ def print_jobhistory(apiurl, prj, current_package, repository, arch, format = 't
 root = ET.parse(f).getroot()

 if format == 'text':
-print "time package reason code build time worker"
+print("time package reason code build time worker")
 for node in root.findall('jobhist'):
 package = node.get('package')
 worker = node.get('workerid')
@@ -5339,9 +5341,9 @@ def print_jobhistory(apiurl, prj, current_package, repository, arch, format = 't
 waitbuild = " %2dm %2ds" % (waittm.tm_min, waittm.tm_sec)

 if format == 'csv':
-print '%s|%s|%s|%s|%s|%s' % (endtime, package, reason, code, waitbuild, worker)
+print('%s|%s|%s|%s|%s|%s' % (endtime, package, reason, code, waitbuild, worker))
 else:
-print '%s %-50s %-16s %-16s %-16s %-16s' % (endtime, package[0:49], reason[0:15], code[0:15], waitbuild, worker)
+print('%s %-50s %-16s %-16s %-16s %-16s' % (endtime, package[0:49], reason[0:15], code[0:15], waitbuild, worker))


 def get_commitlog(apiurl, prj, package, revision, format = 'text', meta = False, deleted = False, revision_upper=None):
@@ -5627,7 +5629,7 @@ def parseRevisionOption(string):
 int(i)
 return splitted_rev
 except ValueError:
-print >>sys.stderr, 'your revision \'%s\' will be ignored' % string
+print('your revision \'%s\' will be ignored' % string, file=sys.stderr)
 return None, None
 else:
 if string.isdigit():
@@ -5636,7 +5638,7 @@ def parseRevisionOption(string):
 # could be an md5sum
 return string, None
 else:
-print >>sys.stderr, 'your revision \'%s\' will be ignored' % string
+print('your revision \'%s\' will be ignored' % string, file=sys.stderr)
 return None, None
 else:
 return None, None
@@ -5858,7 +5860,7 @@ def unpack_srcrpm(srpm, dir, *files):
 only this files will be unpacked.
 """
 if not is_srcrpm(srpm):
-print >>sys.stderr, 'error - \'%s\' is not a source rpm.' % srpm
+print('error - \'%s\' is not a source rpm.' % srpm, file=sys.stderr)
 sys.exit(1)
 curdir = os.getcwd()
 if os.path.isdir(dir):
@@ -5866,7 +5868,7 @@ def unpack_srcrpm(srpm, dir, *files):
 cmd = 'rpm2cpio %s | cpio -i %s &> /dev/null' % (srpm, ' '.join(files))
 ret = run_external(cmd, shell=True)
 if ret != 0:
-print >>sys.stderr, 'error \'%s\' - cannot extract \'%s\'' % (ret, srpm)
+print('error \'%s\' - cannot extract \'%s\'' % (ret, srpm), file=sys.stderr)
 sys.exit(1)
 os.chdir(curdir)

@@ -5920,17 +5922,17 @@ def addPerson(apiurl, prj, pac, user, role="maintainer"):
 for person in root.getiterator('person'):
 if person.get('userid') == user and person.get('role') == role:
 found = True
-print "user already exists"
+print("user already exists")
 break
 if not found:
 # the xml has a fixed structure
 root.insert(2, ET.Element('person', role=role, userid=user))
-print 'user \'%s\' added to \'%s\'' % (user, pac or prj)
+print('user \'%s\' added to \'%s\'' % (user, pac or prj))
 edit_meta(metatype=kind,
 path_args=path,
 data=ET.tostring(root))
 else:
-print "osc: an error occured"
+print("osc: an error occured")

 def delMaintainer(apiurl, prj, pac, user):
 # for backward compatibility only
@@ -5954,15 +5956,15 @@ def delPerson(apiurl, prj, pac, user, role="maintainer"):
 if person.get('userid') == user and person.get('role') == role:
 root.remove(person)
 found = True
-print "user \'%s\' removed" % user
+print("user \'%s\' removed" % user)
 if found:
 edit_meta(metatype=kind,
 path_args=path,
 data=ET.tostring(root))
 else:
-print "user \'%s\' not found in \'%s\'" % (user, pac or prj)
+print("user \'%s\' not found in \'%s\'" % (user, pac or prj))
 else:
-print "an error occured"
+print("an error occured")

 def setBugowner(apiurl, prj, pac, user=None, group=None):
 """ delete all bugowners (user and group entries) and set one new one in a package or project """
@@ -5988,7 +5990,7 @@ def setBugowner(apiurl, prj, pac, user=None, group=None):
 elif group:
 root.insert(2, ET.Element('group', role='bugowner', groupid=group))
 else:
-print "Neither user nor group is specified"
+print("Neither user nor group is specified")
 edit_meta(metatype=kind,
 path_args=path,
 data=ET.tostring(root))
@@ -6020,7 +6022,7 @@ def setDevelProject(apiurl, prj, pac, dprj, dpkg=None):
 path_args=path,
 data=ET.tostring(root))
 else:
-print "osc: an error occured"
+print("osc: an error occured")

 def createPackageDir(pathname, prj_obj=None):
 """
@@ -6034,7 +6036,7 @@ def createPackageDir(pathname, prj_obj=None):
 prj = prj_obj or Project(prj_dir, False)
 Package.init_package(prj.apiurl, prj.name, pac_dir, pac_dir)
 prj.addPackage(pac_dir)
-print statfrmt('A', os.path.normpath(pathname))
+print(statfrmt('A', os.path.normpath(pathname)))
 else:
 raise oscerr.OscIOError(None, 'file or directory \'%s\' already exists' % pathname)
 else:
@@ -6137,7 +6139,7 @@ def addFiles(filenames, prj_obj = None):
 raise oscerr.WrongArgs('osc: cannot add a directory to a project unless ' \
 '\'do_package_tracking\' is enabled in the configuration file')
 elif os.path.isdir(filename):
-print 'skipping directory \'%s\'' % filename
+print('skipping directory \'%s\'' % filename)
 pacs.remove(filename)
 pacs = findpacs(pacs)
 for pac in pacs:
@@ -6145,19 +6147,19 @@ def addFiles(filenames, prj_obj = None):
 prj = prj_obj or Project(os.path.dirname(pac.absdir), False)
 if pac.name in prj.pacs_unvers:
 prj.addPackage(pac.name)
-print statfrmt('A', getTransActPath(os.path.join(pac.dir, os.pardir, pac.name)))
+print(statfrmt('A', getTransActPath(os.path.join(pac.dir, os.pardir, pac.name))))
 for filename in pac.filenamelist_unvers:
 if os.path.isdir(os.path.join(pac.dir, filename)):
-print 'skipping directory \'%s\'' % os.path.join(pac.dir, filename)
+print('skipping directory \'%s\'' % os.path.join(pac.dir, filename))
 else:
 pac.todo.append(filename)
 elif pac.name in prj.pacs_have:
-print 'osc: warning: \'%s\' is already under version control' % pac.name
+print('osc: warning: \'%s\' is already under version control' % pac.name)
 for filename in pac.todo:
 if filename in pac.skipped:
 continue
 if filename in pac.excluded:
-print >>sys.stderr, 'osc: warning: \'%s\' is excluded from a working copy' % filename
+print('osc: warning: \'%s\' is excluded from a working copy' % filename, file=sys.stderr)
 continue
 pac.addfile(filename)

@@ -6281,11 +6283,11 @@ def print_request_list(apiurl, project, package = None, states = ('new','review'
 requests = get_request_list(apiurl, project, package, req_state=states)
 msg = 'Pending requests for %s: %s (%s)'
 if package is None and len(requests):
-print msg % ('project', project, len(requests))
+print(msg % ('project', project, len(requests)))
 elif len(requests):
-print msg % ('package', '/'.join([project, package]), len(requests))
+print(msg % ('package', '/'.join([project, package]), len(requests)))
 for r in requests:
-print r.list_view(), '\n'
+print(r.list_view(), '\n')

 def request_interactive_review(apiurl, request, initial_cmd='', group=None, ignore_reviews=False):
 """review the request interactively"""
@@ -6298,12 +6300,12 @@ def request_interactive_review(apiurl, request, initial_cmd='', group=None, igno
 change_request_state(*args, **kwargs)
 return True
 except urllib2.HTTPError as e:
-print >>sys.stderr, 'Server returned an error:', e
+print('Server returned an error:', e, file=sys.stderr)
-print >>sys.stderr, 'Try -f to force the state change'
+print('Try -f to force the state change', file=sys.stderr)
 return False

 def print_request(request):
-print request
+print(request)

 print_request(request)
 try:
@@ -6347,15 +6349,15 @@ def request_interactive_review(apiurl, request, initial_cmd='', group=None, igno
 run_editor(tmpfile.name)
 print_request(request)
 elif repl == 's':
-print >>sys.stderr, 'skipping: #%s' % request.reqid
+print('skipping: #%s' % request.reqid, file=sys.stderr)
 break
 elif repl == 'c':
-print >>sys.stderr, 'Aborting'
+print('Aborting', file=sys.stderr)
 raise oscerr.UserAbort()
 elif repl == 'b' and src_actions:
 for action in src_actions:
-print '%s/%s:' % (action.src_project, action.src_package)
+print('%s/%s:' % (action.src_project, action.src_package))
-print '\n'.join(get_results(apiurl, action.src_project, action.src_package))
+print('\n'.join(get_results(apiurl, action.src_project, action.src_package)))
 elif repl == 'e' and sr_actions:
 # this is only for sr_actions
 if not editprj:
@@ -6369,7 +6371,7 @@ def request_interactive_review(apiurl, request, initial_cmd='', group=None, igno
 state_map = {'a': 'accepted', 'd': 'declined', 'r': 'revoked'}
 mo = re.search('^([adrl])(?:\s+(-f)?\s*-m\s+(.*))?$', repl)
 if mo is None or orequest and mo.group(1) != 'a':
-print >>sys.stderr, 'invalid choice: \'%s\'' % repl
+print('invalid choice: \'%s\'' % repl, file=sys.stderr)
 continue
 state = state_map.get(mo.group(1))
 force = mo.group(2) is not None
@@ -6417,18 +6419,18 @@ def request_interactive_review(apiurl, request, initial_cmd='', group=None, igno
 if len(group_reviews) == 1 and conf.config['review_inherit_group']:
 review = group_reviews[0]
 else:
-print 'Please chose one of the following reviews:'
+print('Please chose one of the following reviews:')
 for i in range(len(reviews)):
 fmt = Request.format_review(reviews[i])
-print '(%i)' % i, 'by %(type)-10s %(by)s' % fmt
+print('(%i)' % i, 'by %(type)-10s %(by)s' % fmt)
 num = raw_input('> ')
 try:
 num = int(num)
 except ValueError:
-print '\'%s\' is not a number.' % num
+print('\'%s\' is not a number.' % num)
 continue
 if num < 0 or num >= len(reviews):
-print 'number \'%s\' out of range.' % num
+print('number \'%s\' out of range.' % num)
 continue
 review = reviews[num]
 change_review_state(apiurl, request.reqid, state, by_user=review.by_user,
@@ -6448,10 +6450,10 @@ def edit_submitrequest(apiurl, project, orequest, new_request=None):
 actions = new_request.get_actions('submit')
 num = 0
 if len(actions) > 1:
-print 'Please chose one of the following submit actions:'
+print('Please chose one of the following submit actions:')
 for i in range(len(actions)):
 fmt = Request.format_action(actions[i])
-print '(%i)' % i, '%(source)s %(target)s' % fmt
+print('(%i)' % i, '%(source)s %(target)s' % fmt)
 num = raw_input('> ')
 try:
 num = int(num)
@@ -6472,8 +6474,8 @@ def edit_submitrequest(apiurl, project, orequest, new_request=None):
 shell = os.getenv('SHELL', default='/bin/sh')
 olddir = os.getcwd()
 os.chdir(tmpdir)
-print 'Checked out package \'%s\' to %s. Started a new shell (%s).\n' \
+print('Checked out package \'%s\' to %s. Started a new shell (%s).\n' \
-'Please fix the package and close the shell afterwards.' % (package, tmpdir, shell)
+'Please fix the package and close the shell afterwards.' % (package, tmpdir, shell))
 run_external(shell)
 # the pkg might have uncommitted changes...
 cleanup = False
@@ -6482,8 +6484,8 @@ def edit_submitrequest(apiurl, project, orequest, new_request=None):
 p = Package(tmpdir)
 modified = p.get_status(False, ' ', '?', 'S')
 if modified:
-print 'Your working copy has the following modifications:'
+print('Your working copy has the following modifications:')
-print '\n'.join([statfrmt(st, filename) for st, filename in modified])
+print('\n'.join([statfrmt(st, filename) for st, filename in modified]))
 repl = raw_input('Do you want to commit the local changes first? (y|N) ')
 if repl in ('y', 'Y'):
 msg = get_commit_msg(p.absdir, [p])
@@ -6493,7 +6495,7 @@ def edit_submitrequest(apiurl, project, orequest, new_request=None):
 if cleanup:
 shutil.rmtree(tmpdir)
 else:
-print 'Please remove the dir \'%s\' manually' % tmpdir
+print('Please remove the dir \'%s\' manually' % tmpdir)
 r = Request()
 for action in orequest.get_actions():
 new_action = Action.from_xml(action.to_xml())
osc/fetch.py
@@ -3,6 +3,8 @@
 # and distributed under the terms of the GNU General Public Licence,
 # either version 2, or (at your option) any later version.

+from __future__ import print_function
+
 import sys, os
 import urllib2
 from urllib import quote_plus
@@ -83,8 +85,8 @@ class Fetcher:
 """failure output for failovers from urlgrabber"""
 if errobj.url.startswith('file://'):
 return {}
-print 'Trying openSUSE Build Service server for %s (%s), not found at %s.' \
+print('Trying openSUSE Build Service server for %s (%s), not found at %s.' \
-% (self.curpac, self.curpac.project, errobj.url.split('/')[2])
+% (self.curpac, self.curpac.project, errobj.url.split('/')[2]))
 return {}

 def __add_cpio(self, pac):
@@ -157,9 +159,9 @@ class Fetcher:
 mg = MirrorGroup(self.gr, pac.urllist, failure_callback=(self.failureReport,(),{}))

 if self.http_debug:
-print >>sys.stderr, '\nURLs to try for package \'%s\':' % pac
+print('\nURLs to try for package \'%s\':' % pac, file=sys.stderr)
-print >>sys.stderr, '\n'.join(pac.urllist)
+print('\n'.join(pac.urllist), file=sys.stderr)
-print >>sys.stderr
+print(file=sys.stderr)

 (fd, tmpfile) = tempfile.mkstemp(prefix='osc_build')
 try:
@@ -172,10 +174,10 @@ class Fetcher:
 if self.enable_cpio and e.errno == 256:
 self.__add_cpio(pac)
 return
-print
+print()
-print >>sys.stderr, 'Error:', e.strerror
+print('Error:', e.strerror, file=sys.stderr)
-print >>sys.stderr, 'Failed to retrieve %s from the following locations (in order):' % pac.filename
+print('Failed to retrieve %s from the following locations (in order):' % pac.filename, file=sys.stderr)
-print >>sys.stderr, '\n'.join(pac.urllist)
+print('\n'.join(pac.urllist), file=sys.stderr)
 sys.exit(1)
 finally:
 os.close(fd)
@@ -189,7 +191,7 @@ class Fetcher:
 canonname = pkgq.canonname()
 else:
 if pac_obj is None:
-print >>sys.stderr, 'Unsupported file type: ', tmpfile
+print('Unsupported file type: ', tmpfile, file=sys.stderr)
 sys.exit(1)
 canonname = pac_obj.binary

@@ -206,8 +208,8 @@ class Fetcher:
 try:
 os.makedirs(dir, mode=0o755)
 except OSError as e:
-print >>sys.stderr, 'packagecachedir is not writable for you?'
+print('packagecachedir is not writable for you?', file=sys.stderr)
-print >>sys.stderr, e
+print(e, file=sys.stderr)
 sys.exit(1)

 def run(self, buildinfo):
@@ -221,7 +223,7 @@ class Fetcher:
 needed = all - cached
 if all:
 miss = 100.0 * needed / all
-print "%.1f%% cache miss. %d/%d dependencies cached.\n" % (miss, cached, all)
+print("%.1f%% cache miss. %d/%d dependencies cached.\n" % (miss, cached, all))
 done = 1
 for i in buildinfo.deps:
 i.makeurls(self.cachedir, self.urllist)
@@ -232,15 +234,15 @@ class Fetcher:
 try:
 # if there isn't a progress bar, there is no output at all
 if not self.progress_obj:
-print '%d/%d (%s) %s' % (done, needed, i.project, i.filename)
+print('%d/%d (%s) %s' % (done, needed, i.project, i.filename))
 self.fetch(i)
 if self.progress_obj:
-print " %d/%d\r" % (done, needed),
+print(" %d/%d\r" % (done, needed), end=' ')
 sys.stdout.flush()

 except KeyboardInterrupt:
-print 'Cancelled by user (ctrl-c)'
+print('Cancelled by user (ctrl-c)')
-print 'Exiting.'
+print('Exiting.')
 sys.exit(0)
 done += 1

@@ -264,20 +266,20 @@ class Fetcher:
 buildinfo.keys.append(dest)
 buildinfo.prjkeys.append(i)
 except KeyboardInterrupt:
-print 'Cancelled by user (ctrl-c)'
+print('Cancelled by user (ctrl-c)')
-print 'Exiting.'
+print('Exiting.')
 if os.path.exists(dest):
 os.unlink(dest)
 sys.exit(0)
 except URLGrabError as e:
 # Not found is okay, let's go to the next project
 if e.code != 404:
-print >>sys.stderr, "Invalid answer from server", e
+print("Invalid answer from server", e, file=sys.stderr)
 sys.exit(1)

 if self.http_debug:
-print >>sys.stderr, "can't fetch key for %s: %s" %(i, e.strerror)
+print("can't fetch key for %s: %s" %(i, e.strerror), file=sys.stderr)
-print >>sys.stderr, "url: %s" % url
+print("url: %s" % url, file=sys.stderr)

 if os.path.exists(dest):
 os.unlink(dest)
@@ -317,20 +319,20 @@ def verify_pacs_old(pac_list):
 for line in o.readlines():

 if not 'OK' in line:
-print
+print()
-print >>sys.stderr, 'The following package could not be verified:'
+print('The following package could not be verified:', file=sys.stderr)
-print >>sys.stderr, line
+print(line, file=sys.stderr)
 sys.exit(1)

 if 'NOT OK' in line:
-print
+print()
-print >>sys.stderr, 'The following package could not be verified:'
+print('The following package could not be verified:', file=sys.stderr)
-print >>sys.stderr, line
+print(line, file=sys.stderr)

 if 'MISSING KEYS' in line:
 missing_key = line.split('#')[-1].split(')')[0]

-print >>sys.stderr, """
+print("""
 - If the key (%(name)s) is missing, install it first.
 For example, do the following:
 osc signkey PROJECT > file
@@ -343,13 +345,13 @@ def verify_pacs_old(pac_list):

 - You may use --no-verify to skip the verification (which is a risk for your system).
 """ % {'name': missing_key,
-'dir': os.path.expanduser('~')}
+'dir': os.path.expanduser('~')}, file=sys.stderr)

 else:
-print >>sys.stderr, """
+print("""
 - If the signature is wrong, you may try deleting the package manually
 and re-run this program, so it is fetched again.
-"""
+""", file=sys.stderr)

 sys.exit(1)

@@ -370,7 +372,7 @@ def verify_pacs(bi):
 if not bi.keys:
 raise oscerr.APIError("can't verify packages due to lack of GPG keys")

-print "using keys from", ', '.join(bi.prjkeys)
+print("using keys from", ', '.join(bi.prjkeys))

 from . import checker
 failed = False
@@ -382,7 +384,7 @@ def verify_pacs(bi):
 checker.check(pkg)
 except Exception as e:
 failed = True
-print pkg, ':', e
+print(pkg, ':', e)
 except:
 checker.cleanup()
 raise
@@ -19,6 +19,7 @@
 # it uses getScreenWidth() scrapped from smart.
 # 2007-04-24, poeml

+from __future__ import print_function

 from urlgrabber.progress import BaseMeter, format_time, format_number
 import sys, os
@@ -3,6 +3,8 @@
 # and distributed under the terms of the GNU General Public Licence,
 # either version 2, or (at your option) any later version.

+from __future__ import print_function
+
 import M2Crypto.httpslib
 from M2Crypto.SSL.Checker import SSLVerificationError
 from M2Crypto import m2, SSL
@@ -82,7 +84,7 @@ def verify_cb(ctx, ok, store):
 return 1

 except Exception as e:
-print e
+print(e)
 return 0

 class FailCert:
@@ -114,12 +116,12 @@ class ValidationErrors:
 def show(self):
 for depth in self.failures.keys():
 cert = self.failures[depth].cert
-print "*** certificate verify failed at depth %d" % depth
+print("*** certificate verify failed at depth %d" % depth)
-print "Subject: ", cert.get_subject()
+print("Subject: ", cert.get_subject())
-print "Issuer: ", cert.get_issuer()
+print("Issuer: ", cert.get_issuer())
-print "Valid: ", cert.get_not_before(), "-", cert.get_not_after()
+print("Valid: ", cert.get_not_before(), "-", cert.get_not_after())
-print "Fingerprint(MD5): ", cert.get_fingerprint('md5')
+print("Fingerprint(MD5): ", cert.get_fingerprint('md5'))
-print "Fingerprint(SHA1): ", cert.get_fingerprint('sha1')
+print("Fingerprint(SHA1): ", cert.get_fingerprint('sha1'))

 for err in self.failures[depth].errs:
 reason = "Unknown"
@@ -128,7 +130,7 @@ class ValidationErrors:
 reason = M2Crypto.Err.get_x509_verify_error(err)
 except:
 pass
-print "Reason:", reason
+print("Reason:", reason)

 # check if the encountered errors could be ignored
 def could_ignore(self):
@@ -316,31 +318,31 @@ def verify_certificate(connection):
 if tc.is_trusted(): # ok, same cert as the stored one
 return
 else:
-print >>sys.stderr, "WARNING: REMOTE HOST IDENTIFICATION HAS CHANGED!"
+print("WARNING: REMOTE HOST IDENTIFICATION HAS CHANGED!", file=sys.stderr)
-print >>sys.stderr, "IT IS POSSIBLE THAT SOMEONE IS DOING SOMETHING NASTY!"
+print("IT IS POSSIBLE THAT SOMEONE IS DOING SOMETHING NASTY!", file=sys.stderr)
-print >>sys.stderr, "offending certificate is at '%s'" % tc.file
+print("offending certificate is at '%s'" % tc.file, file=sys.stderr)
 raise SSLVerificationError("remote host identification has changed")

 verrs.show()

-print
+print()

 if not verrs.could_ignore():
 raise SSLVerificationError("Certificate validation error cannot be ignored")

 if not verrs.chain_ok:
-print "A certificate in the chain failed verification"
+print("A certificate in the chain failed verification")
 if not verrs.cert_ok:
-print "The server certificate failed verification"
+print("The server certificate failed verification")

 while True:
-print """
+print("""
 Would you like to
0 - quit (default)
|
0 - quit (default)
|
||||||
1 - continue anyways
|
1 - continue anyways
|
||||||
2 - trust the server certificate permanently
|
2 - trust the server certificate permanently
|
||||||
9 - review the server certificate
|
9 - review the server certificate
|
||||||
"""
|
""")
|
||||||
|
|
||||||
r = raw_input("Enter choice [0129]: ")
|
r = raw_input("Enter choice [0129]: ")
|
||||||
if not r or r == '0':
|
if not r or r == '0':
|
||||||
@ -353,6 +355,6 @@ Would you like to
|
|||||||
tc.trust_always()
|
tc.trust_always()
|
||||||
return
|
return
|
||||||
elif r == '9':
|
elif r == '9':
|
||||||
print cert.as_text()
|
print(cert.as_text())
|
||||||
|
|
||||||
# vim: sw=4 et
|
# vim: sw=4 et
|
||||||
|
@ -13,6 +13,8 @@
|
|||||||
# along with this program; if not, write to the Free Software
|
# along with this program; if not, write to the Free Software
|
||||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
|
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
import sys
|
import sys
|
||||||
@ -171,7 +173,7 @@ class Ar:
|
|||||||
self.__file = mmap.mmap(self.__file.fileno(), os.path.getsize(self.__file.name))
|
self.__file = mmap.mmap(self.__file.fileno(), os.path.getsize(self.__file.name))
|
||||||
except EnvironmentError as e:
|
except EnvironmentError as e:
|
||||||
if e.errno == 19 or ( hasattr(e, 'winerror') and e.winerror == 5 ):
|
if e.errno == 19 or ( hasattr(e, 'winerror') and e.winerror == 5 ):
|
||||||
print >>sys.stderr, 'cannot use mmap to read the file, falling back to the default io'
|
print('cannot use mmap to read the file, falling back to the default io', file=sys.stderr)
|
||||||
else:
|
else:
|
||||||
raise e
|
raise e
|
||||||
else:
|
else:
|
||||||
|
@ -1,3 +1,6 @@
|
|||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
import os.path
|
import os.path
|
||||||
import re
|
import re
|
||||||
import tarfile
|
import tarfile
|
||||||
@ -153,12 +156,12 @@ if __name__ == '__main__':
|
|||||||
try:
|
try:
|
||||||
archq = ArchQuery.query(sys.argv[1])
|
archq = ArchQuery.query(sys.argv[1])
|
||||||
except ArchError as e:
|
except ArchError as e:
|
||||||
print e.msg
|
print(e.msg)
|
||||||
sys.exit(2)
|
sys.exit(2)
|
||||||
print archq.name(), archq.version(), archq.release(), archq.arch()
|
print(archq.name(), archq.version(), archq.release(), archq.arch())
|
||||||
print archq.canonname()
|
print(archq.canonname())
|
||||||
print archq.description()
|
print(archq.description())
|
||||||
print '##########'
|
print('##########')
|
||||||
print '\n'.join(archq.provides())
|
print('\n'.join(archq.provides()))
|
||||||
print '##########'
|
print('##########')
|
||||||
print '\n'.join(archq.requires())
|
print('\n'.join(archq.requires()))
|
||||||
|
@ -13,6 +13,8 @@
|
|||||||
# along with this program; if not, write to the Free Software
|
# along with this program; if not, write to the Free Software
|
||||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
|
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
import mmap
|
import mmap
|
||||||
import os
|
import os
|
||||||
import stat
|
import stat
|
||||||
@ -152,7 +154,7 @@ class CpioRead:
|
|||||||
self.__file = mmap.mmap(self.__file.fileno(), os.path.getsize(self.__file.name))
|
self.__file = mmap.mmap(self.__file.fileno(), os.path.getsize(self.__file.name))
|
||||||
except EnvironmentError as e:
|
except EnvironmentError as e:
|
||||||
if e.errno == 19 or ( hasattr(e, 'winerror') and e.winerror == 5 ):
|
if e.errno == 19 or ( hasattr(e, 'winerror') and e.winerror == 5 ):
|
||||||
print >>sys.stderr, 'cannot use mmap to read the file, failing back to default'
|
print('cannot use mmap to read the file, failing back to default', file=sys.stderr)
|
||||||
else:
|
else:
|
||||||
raise e
|
raise e
|
||||||
else:
|
else:
|
||||||
|
@ -1,3 +1,6 @@
|
|||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
from . import ar
|
from . import ar
|
||||||
import os.path
|
import os.path
|
||||||
import re
|
import re
|
||||||
@ -168,11 +171,11 @@ if __name__ == '__main__':
|
|||||||
try:
|
try:
|
||||||
debq = DebQuery.query(sys.argv[1])
|
debq = DebQuery.query(sys.argv[1])
|
||||||
except DebError as e:
|
except DebError as e:
|
||||||
print e.msg
|
print(e.msg)
|
||||||
sys.exit(2)
|
sys.exit(2)
|
||||||
print debq.name(), debq.version(), debq.release(), debq.arch()
|
print(debq.name(), debq.version(), debq.release(), debq.arch())
|
||||||
print debq.description()
|
print(debq.description())
|
||||||
print '##########'
|
print('##########')
|
||||||
print '\n'.join(debq.provides())
|
print('\n'.join(debq.provides()))
|
||||||
print '##########'
|
print('##########')
|
||||||
print '\n'.join(debq.requires())
|
print('\n'.join(debq.requires()))
|
||||||
|
@ -1,3 +1,6 @@
|
|||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
class PackageError(Exception):
|
class PackageError(Exception):
|
||||||
"""base class for all package related errors"""
|
"""base class for all package related errors"""
|
||||||
def __init__(self, fname, msg):
|
def __init__(self, fname, msg):
|
||||||
@ -115,13 +118,13 @@ if __name__ == '__main__':
|
|||||||
try:
|
try:
|
||||||
pkgq = PackageQuery.query(sys.argv[1])
|
pkgq = PackageQuery.query(sys.argv[1])
|
||||||
except PackageError as e:
|
except PackageError as e:
|
||||||
print e.msg
|
print(e.msg)
|
||||||
sys.exit(2)
|
sys.exit(2)
|
||||||
print pkgq.name()
|
print(pkgq.name())
|
||||||
print pkgq.version()
|
print(pkgq.version())
|
||||||
print pkgq.release()
|
print(pkgq.release())
|
||||||
print pkgq.description()
|
print(pkgq.description())
|
||||||
print '##########'
|
print('##########')
|
||||||
print '\n'.join(pkgq.provides())
|
print('\n'.join(pkgq.provides()))
|
||||||
print '##########'
|
print('##########')
|
||||||
print '\n'.join(pkgq.requires())
|
print('\n'.join(pkgq.requires()))
|
||||||
|
@ -1,3 +1,6 @@
|
|||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
import struct
|
import struct
|
||||||
@ -313,12 +316,12 @@ if __name__ == '__main__':
|
|||||||
try:
|
try:
|
||||||
rpmq = RpmQuery.query(sys.argv[1])
|
rpmq = RpmQuery.query(sys.argv[1])
|
||||||
except RpmError as e:
|
except RpmError as e:
|
||||||
print e.msg
|
print(e.msg)
|
||||||
sys.exit(2)
|
sys.exit(2)
|
||||||
print rpmq.name(), rpmq.version(), rpmq.release(), rpmq.arch(), rpmq.url()
|
print(rpmq.name(), rpmq.version(), rpmq.release(), rpmq.arch(), rpmq.url())
|
||||||
print rpmq.summary()
|
print(rpmq.summary())
|
||||||
print rpmq.description()
|
print(rpmq.description())
|
||||||
print '##########'
|
print('##########')
|
||||||
print '\n'.join(rpmq.provides())
|
print('\n'.join(rpmq.provides()))
|
||||||
print '##########'
|
print('##########')
|
||||||
print '\n'.join(rpmq.requires())
|
print('\n'.join(rpmq.requires()))
|
||||||
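
All of the hunks above apply the same mechanical rewrite. A minimal sketch of the pattern, with an invented `line` value used only for illustration (not taken from the diff):

    from __future__ import print_function  # the same import the diff adds at the top of each module

    import sys

    # hypothetical example value, not a line from the commit
    line = 'NOT OK: signature could not be verified'

    # Python 2 statement form replaced throughout this commit:
    #     print >>sys.stderr, line
    #     print
    # Equivalent function form, valid on both Python 2 (with the import) and Python 3:
    print(line, file=sys.stderr)
    print()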