Merge branch 'master' into python3_fetch_module
Commit 2693d24a0a
osc/babysitter.py
@@ -16,6 +16,7 @@ from osc import oscerr
from .oscsslexcp import NoSecureSSLError
from osc.util.cpio import CpioError
from osc.util.packagequery import PackageError
from osc.util.helper import decode_it

try:
from M2Crypto.SSL.Checker import SSLVerificationError
@@ -112,11 +113,11 @@ def run(prg, argv=None):
print(body, file=sys.stderr)

if e.code in [400, 403, 404, 500]:
if '<summary>' in body:
msg = body.split('<summary>')[1]
msg = msg.split('</summary>')[0]
msg = msg.replace('&lt;', '<').replace('&gt;', '>').replace('&amp;', '&')
print(msg, file=sys.stderr)
if b'<summary>' in body:
msg = body.split(b'<summary>')[1]
msg = msg.split(b'</summary>')[0]
msg = msg.replace(b'&lt;', b'<').replace(b'&gt;', b'>').replace(b'&amp;', b'&')
print(decode_it(msg), file=sys.stderr)
if e.code >= 500 and e.code <= 599:
print('\nRequest: %s' % e.filename)
print('Headers:')
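The hunk above switches the `<summary>` extraction to operate on a bytes response body and only decodes for printing. A minimal standalone sketch of that pattern (the function name is illustrative, not osc's actual API), assuming `body` is the bytes read from the HTTPError:

    from osc.util.helper import decode_it

    def summary_from_body(body):
        # keep everything as bytes; decode only at the printing boundary
        if b'<summary>' in body:
            msg = body.split(b'<summary>')[1].split(b'</summary>')[0]
            msg = msg.replace(b'&lt;', b'<').replace(b'&gt;', b'>').replace(b'&amp;', b'&')
            return decode_it(msg)
        return None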
osc/build.py (64 changes)
@ -23,6 +23,7 @@ from osc.fetch import *
|
||||
from osc.core import get_buildinfo, store_read_apiurl, store_read_project, store_read_package, meta_exists, quote_plus, get_buildconfig, is_package_dir, dgst
|
||||
from osc.core import get_binarylist, get_binary_file, run_external, return_external, raw_input
|
||||
from osc.util import rpmquery, debquery, archquery
|
||||
from osc.util.helper import decode_it
|
||||
import osc.conf
|
||||
from . import oscerr
|
||||
import subprocess
|
||||
@ -440,11 +441,11 @@ def get_prefer_pkgs(dirs, wanted_arch, type, cpio):
|
||||
packageQuery = packagequery.PackageQuery.query(path)
|
||||
packageQueries.add(packageQuery)
|
||||
|
||||
prefer_pkgs = dict((name, packageQuery.path())
|
||||
prefer_pkgs = dict((decode_it(name), packageQuery.path())
|
||||
for name, packageQuery in packageQueries.items())
|
||||
|
||||
depfile = create_deps(packageQueries.values())
|
||||
cpio.add('deps', '\n'.join(depfile))
|
||||
cpio.add(b'deps', b'\n'.join(depfile))
|
||||
return prefer_pkgs
|
||||
|
||||
|
||||
@ -455,22 +456,22 @@ def create_deps(pkgqs):
|
||||
"""
|
||||
depfile = []
|
||||
for p in pkgqs:
|
||||
id = '%s.%s-0/0/0: ' % (p.name(), p.arch())
|
||||
depfile.append('P:%s%s' % (id, ' '.join(p.provides())))
|
||||
depfile.append('R:%s%s' % (id, ' '.join(p.requires())))
|
||||
id = b'%s.%s-0/0/0: ' % (p.name(), p.arch())
|
||||
depfile.append(b'P:%s%s' % (id, b' '.join(p.provides())))
|
||||
depfile.append(b'R:%s%s' % (id, b' '.join(p.requires())))
|
||||
d = p.conflicts()
|
||||
if d:
|
||||
depfile.append('C:%s%s' % (id, ' '.join(d)))
|
||||
depfile.append(b'C:%s%s' % (id, b' '.join(d)))
|
||||
d = p.obsoletes()
|
||||
if d:
|
||||
depfile.append('O:%s%s' % (id, ' '.join(d)))
|
||||
depfile.append(b'O:%s%s' % (id, b' '.join(d)))
|
||||
d = p.recommends()
|
||||
if d:
|
||||
depfile.append('r:%s%s' % (id, ' '.join(d)))
|
||||
depfile.append(b'r:%s%s' % (id, b' '.join(d)))
|
||||
d = p.supplements()
|
||||
if d:
|
||||
depfile.append('s:%s%s' % (id, ' '.join(d)))
|
||||
depfile.append('I:%s%s-%s 0-%s' % (id, p.name(), p.evr(), p.arch()))
|
||||
depfile.append(b's:%s%s' % (id, b' '.join(d)))
|
||||
depfile.append(b'I:%s%s-%s 0-%s' % (id, p.name(), p.evr().encode(), p.arch()))
|
||||
return depfile
|
||||
|
||||
|
||||
@ -513,7 +514,7 @@ def get_kiwipath_from_buildinfo(apiurl, bi_filename, prj, repo):
|
||||
kiwipath = bi.pathes
|
||||
kiwipath.insert(0, myprp)
|
||||
return kiwipath
|
||||
|
||||
|
||||
def main(apiurl, opts, argv):
|
||||
|
||||
repo = argv[0]
|
||||
@ -677,24 +678,24 @@ def main(apiurl, opts, argv):
|
||||
extra_pkgs += xp
|
||||
|
||||
prefer_pkgs = {}
|
||||
build_descr_data = open(build_descr).read()
|
||||
build_descr_data = open(build_descr, 'rb').read()
|
||||
|
||||
# XXX: dirty hack but there's no api to provide custom defines
|
||||
if opts.without:
|
||||
s = ''
|
||||
for i in opts.without:
|
||||
s += "%%define _without_%s 1\n" % i
|
||||
build_descr_data = s + build_descr_data
|
||||
build_descr_data = s.encode() + build_descr_data
|
||||
if opts._with:
|
||||
s = ''
|
||||
for i in opts._with:
|
||||
s += "%%define _with_%s 1\n" % i
|
||||
build_descr_data = s + build_descr_data
|
||||
build_descr_data = s.encode() + build_descr_data
|
||||
if opts.define:
|
||||
s = ''
|
||||
for i in opts.define:
|
||||
s += "%%define %s\n" % i
|
||||
build_descr_data = s + build_descr_data
|
||||
build_descr_data = s.encode() + build_descr_data
|
||||
|
||||
cpiodata = None
|
||||
servicefile = os.path.join(os.path.dirname(build_descr), "_service")
|
||||
@ -724,12 +725,12 @@ def main(apiurl, opts, argv):
|
||||
prefer_pkgs = get_prefer_pkgs(opts.prefer_pkgs, arch, build_type, cpiodata)
|
||||
|
||||
if cpiodata:
|
||||
cpiodata.add(os.path.basename(build_descr), build_descr_data)
|
||||
cpiodata.add(os.path.basename(build_descr.encode()), build_descr_data)
|
||||
# buildenv must come last for compatibility reasons...
|
||||
if buildenvfile:
|
||||
cpiodata.add("buildenv", open(buildenvfile).read())
|
||||
cpiodata.add(b"buildenv", open(buildenvfile, 'rb').read())
|
||||
if servicefile:
|
||||
cpiodata.add("_service", open(servicefile).read())
|
||||
cpiodata.add(b"_service", open(servicefile, 'rb').read())
|
||||
build_descr_data = cpiodata.get()
|
||||
|
||||
# special handling for overlay and rsync-src/dest
|
||||
@ -783,13 +784,14 @@ def main(apiurl, opts, argv):
|
||||
raise oscerr.WrongOptions('--offline is not possible, no local buildconfig file')
|
||||
else:
|
||||
print('Getting buildinfo from server and store to %s' % bi_filename)
|
||||
bi_text = ''.join(get_buildinfo(apiurl,
|
||||
prj,
|
||||
pac,
|
||||
repo,
|
||||
arch,
|
||||
specfile=build_descr_data,
|
||||
addlist=extra_pkgs))
|
||||
|
||||
bi_text = decode_it(get_buildinfo(apiurl,
|
||||
prj,
|
||||
pac,
|
||||
repo,
|
||||
arch,
|
||||
specfile=build_descr_data,
|
||||
addlist=extra_pkgs))
|
||||
if not bi_file:
|
||||
bi_file = open(bi_filename, 'w')
|
||||
# maybe we should check for errors before saving the file
|
||||
@ -802,7 +804,7 @@ def main(apiurl, opts, argv):
|
||||
bc = get_buildconfig(apiurl, prj, repo, kiwipath)
|
||||
if not bc_file:
|
||||
bc_file = open(bc_filename, 'w')
|
||||
bc_file.write(bc)
|
||||
bc_file.write(decode_it(bc))
|
||||
bc_file.flush()
|
||||
except HTTPError as e:
|
||||
if e.code == 404:
|
||||
@ -833,7 +835,7 @@ def main(apiurl, opts, argv):
|
||||
# Set default binary type if cannot be detected
|
||||
binary_type = 'rpm'
|
||||
if os.path.exists('/usr/lib/build/queryconfig'):
|
||||
binary_type = return_external('/usr/lib/build/queryconfig', '--dist', bc_filename, 'binarytype').decode('utf-8').strip()
|
||||
binary_type = decode_it(return_external('/usr/lib/build/queryconfig', '--dist', bc_filename, 'binarytype')).strip()
|
||||
# If binary type is set to a useless value, reset to 'rpm'
|
||||
if binary_type == 'UNDEFINED':
|
||||
binary_type = 'rpm'
|
||||
@ -1161,7 +1163,7 @@ def main(apiurl, opts, argv):
|
||||
if bi.installonly_list:
|
||||
rpmlist.append('installonly: ' + ' '.join(bi.installonly_list) + '\n')
|
||||
|
||||
rpmlist_file = NamedTemporaryFile(prefix='rpmlist.')
|
||||
rpmlist_file = NamedTemporaryFile(mode='w+t', prefix='rpmlist.')
|
||||
rpmlist_filename = rpmlist_file.name
|
||||
rpmlist_file.writelines(rpmlist)
|
||||
rpmlist_file.flush()
|
||||
@ -1261,13 +1263,13 @@ def main(apiurl, opts, argv):
|
||||
(s_built, b_built) = get_built_files(pacdir, bi.buildtype)
|
||||
|
||||
print()
|
||||
if s_built: print(s_built)
|
||||
if s_built: print(decode_it(s_built))
|
||||
print()
|
||||
print(b_built)
|
||||
print(decode_it(b_built))
|
||||
|
||||
if opts.keep_pkgs:
|
||||
for i in b_built.splitlines() + s_built.splitlines():
|
||||
shutil.copy2(i, os.path.join(opts.keep_pkgs, os.path.basename(i)))
|
||||
shutil.copy2(i, os.path.join(opts.keep_pkgs, os.path.basename(decode_it(i))))
|
||||
|
||||
if bi_file:
|
||||
bi_file.close()
|
||||
|
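In the build.py hunks above, the build description is now read as bytes and extra rpm defines are encoded before being prepended. A minimal sketch of that pattern under the assumption that the description payload is bytes (the helper name is illustrative, not osc API):

    def prepend_without_defines(build_descr_data, without):
        # str defines must be encoded before concatenation with the bytes payload
        s = ''
        for i in without:
            s += "%%define _without_%s 1\n" % i
        return s.encode() + build_descr_data

    data = prepend_without_defines(b'Name: example\n', ['docs'])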
osc/cmdln.py (20 changes)
@ -621,16 +621,16 @@ class RawCmdln(cmd.Cmd):
|
||||
${name} man
|
||||
"""
|
||||
mandate = datetime.utcfromtimestamp(int(os.environ.get('SOURCE_DATE_EPOCH', time.time())))
|
||||
self.stdout.write(bytes(
|
||||
self.stdout.write(
|
||||
self.man_header % {
|
||||
'date': mandate.strftime('%b %Y'),
|
||||
'version': self.get_version(),
|
||||
'name': self.name,
|
||||
'ucname': self.name.upper()
|
||||
},
|
||||
"utf-8"))
|
||||
}
|
||||
)
|
||||
|
||||
self.stdout.write(bytes(self.man_commands_header, "utf-8"))
|
||||
self.stdout.write(self.man_commands_header)
|
||||
commands = self._help_get_command_list()
|
||||
for command, doc in commands:
|
||||
cmdname = command.split(' ')[0]
|
||||
@ -641,14 +641,14 @@ class RawCmdln(cmd.Cmd):
|
||||
line = line[8:]
|
||||
lines.append(man_escape(line))
|
||||
|
||||
self.stdout.write(bytes(
|
||||
'.TP\n\\fB%s\\fR\n%s\n' % (command, '\n'.join(lines)), "utf-8"))
|
||||
self.stdout.write(
|
||||
'.TP\n\\fB%s\\fR\n%s\n' % (command, '\n'.join(lines)))
|
||||
|
||||
self.stdout.write(bytes(self.man_options_header, "utf-8"))
|
||||
self.stdout.write(bytes(
|
||||
man_escape(self._help_preprocess('${option_list}', None)), "utf-8"))
|
||||
self.stdout.write(self.man_options_header)
|
||||
self.stdout.write(
|
||||
man_escape(self._help_preprocess('${option_list}', None)))
|
||||
|
||||
self.stdout.write(bytes(self.man_footer, "utf-8"))
|
||||
self.stdout.write(self.man_footer)
|
||||
|
||||
self.stdout.flush()
|
||||
|
||||
|
osc/commandline.py
@@ -29,6 +29,11 @@ from optparse import SUPPRESS_HELP
|
||||
from .core import *
|
||||
from .util import safewriter
|
||||
|
||||
try:
|
||||
from functools import cmp_to_key
|
||||
except ImportError:
|
||||
from .util.helper import cmp_to_key
|
||||
|
||||
MAN_HEADER = r""".TH %(ucname)s "1" "%(date)s" "%(name)s %(version)s" "User Commands"
|
||||
.SH NAME
|
||||
%(name)s \- openSUSE build service command-line tool.
|
||||
@ -435,7 +440,8 @@ class Osc(cmdln.Cmdln):
|
||||
break
|
||||
m = show_files_meta(apiurl, project, package)
|
||||
li = Linkinfo()
|
||||
li.read(ET.fromstring(''.join(m)).find('linkinfo'))
|
||||
root = ET.fromstring(m)
|
||||
li.read(root.find('linkinfo'))
|
||||
if li.haserror():
|
||||
raise oscerr.LinkExpandError(project, package, li.error)
|
||||
project, package, rev = li.project, li.package, li.rev
|
||||
@ -753,7 +759,7 @@ class Osc(cmdln.Cmdln):
|
||||
buf = f.read(16384)
|
||||
if not buf:
|
||||
break
|
||||
sys.stdout.write(buf)
|
||||
sys.stdout.write(decode_it(buf))
|
||||
|
||||
elif opts.delete:
|
||||
print("Delete token")
|
||||
@ -773,7 +779,7 @@ class Osc(cmdln.Cmdln):
|
||||
raise oscerr.WrongArgs("Did you mean --" + args[0] + "?")
|
||||
# just list token
|
||||
for data in streamfile(url, http_GET):
|
||||
sys.stdout.write(data)
|
||||
sys.stdout.write(decode_it(data))
|
||||
|
||||
|
||||
@cmdln.option('-a', '--attribute', metavar='ATTRIBUTE',
|
||||
@ -938,22 +944,22 @@ class Osc(cmdln.Cmdln):
|
||||
# show
|
||||
if not opts.edit and not opts.file and not opts.delete and not opts.create and not opts.set:
|
||||
if cmd == 'prj':
|
||||
sys.stdout.write(''.join(show_project_meta(apiurl, project, rev=opts.revision, blame=opts.blame)))
|
||||
sys.stdout.write(decode_it(b''.join(show_project_meta(apiurl, project, rev=opts.revision, blame=opts.blame))))
|
||||
elif cmd == 'pkg':
|
||||
sys.stdout.write(''.join(show_package_meta(apiurl, project, package, blame=opts.blame)))
|
||||
sys.stdout.write(decode_it(b''.join(show_package_meta(apiurl, project, package, blame=opts.blame))))
|
||||
elif cmd == 'attribute':
|
||||
sys.stdout.write(''.join(show_attribute_meta(apiurl, project, package, subpackage,
|
||||
opts.attribute, opts.attribute_defaults, opts.attribute_project)))
|
||||
sys.stdout.write(decode_it(b''.join(show_attribute_meta(apiurl, project, package, subpackage,
|
||||
opts.attribute, opts.attribute_defaults, opts.attribute_project))))
|
||||
elif cmd == 'prjconf':
|
||||
sys.stdout.write(''.join(show_project_conf(apiurl, project, rev=opts.revision, blame=opts.blame)))
|
||||
sys.stdout.write(decode_it(b''.join(show_project_conf(apiurl, project, rev=opts.revision, blame=opts.blame))))
|
||||
elif cmd == 'user':
|
||||
r = get_user_meta(apiurl, user)
|
||||
if r:
|
||||
sys.stdout.write(''.join(r))
|
||||
sys.stdout.write(decode_it(r))
|
||||
elif cmd == 'group':
|
||||
r = get_group_meta(apiurl, group)
|
||||
if r:
|
||||
sys.stdout.write(''.join(r))
|
||||
sys.stdout.write(decode_it(r))
|
||||
elif cmd == 'pattern':
|
||||
if pattern:
|
||||
r = show_pattern_meta(apiurl, project, pattern)
|
||||
@ -1390,9 +1396,9 @@ Please submit there instead, or use --nodevelproject to force direct submission.
|
||||
if opts.diff or not opts.message:
|
||||
try:
|
||||
rdiff = 'old: %s/%s\nnew: %s/%s rev %s\n' % (dst_project, dst_package, src_project, src_package, rev)
|
||||
rdiff += server_diff(apiurl,
|
||||
rdiff += decode_it(server_diff(apiurl,
|
||||
dst_project, dst_package, None,
|
||||
src_project, src_package, rev, True)
|
||||
src_project, src_package, rev, True))
|
||||
except:
|
||||
rdiff = ''
|
||||
|
||||
@ -2483,7 +2489,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
|
||||
action.tgt_project, action.tgt_package)
|
||||
diff += submit_action_diff(apiurl, action)
|
||||
diff += '\n\n'
|
||||
run_pager(diff, tmp_suffix='')
|
||||
run_pager(decode_it(diff), tmp_suffix='')
|
||||
|
||||
# checkout
|
||||
elif cmd == 'checkout' or cmd == 'co':
|
||||
@ -2983,7 +2989,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
|
||||
revision=rev,
|
||||
comment=comment,
|
||||
keep_link=opts.keep_link)
|
||||
print(r)
|
||||
print(decode_it(r))
|
||||
|
||||
|
||||
@cmdln.option('-r', '--repo', metavar='REPO',
|
||||
@ -3495,7 +3501,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
|
||||
devloc = None
|
||||
if not exists and (srcprj != args[0] or srcpkg != args[1]):
|
||||
try:
|
||||
root = ET.fromstring(''.join(show_attribute_meta(apiurl, args[0], None, None,
|
||||
root = ET.fromstring(b''.join(show_attribute_meta(apiurl, args[0], None, None,
|
||||
conf.config['maintained_update_project_attribute'], False, False)))
|
||||
# this might raise an AttributeError
|
||||
uproject = root.find('attribute').find('value').text
|
||||
@ -3649,7 +3655,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
|
||||
kind = 'pkg'
|
||||
path_args = (project, package)
|
||||
meta = meta_exists(kind, path_args, create_new=False, apiurl=apiurl)
|
||||
root = ET.fromstring(''.join(meta))
|
||||
root = ET.fromstring(b''.join(meta))
|
||||
if root.find('lock') is not None:
|
||||
print('Already locked', file=sys.stderr)
|
||||
sys.exit(1)
|
||||
@ -3854,9 +3860,9 @@ Please submit there instead, or use --nodevelproject to force direct submission.
|
||||
for i in pac.get_diff(rev1):
|
||||
diff += ''.join(i)
|
||||
else:
|
||||
diff += server_diff_noex(pac.apiurl, pac.prjname, pac.name, rev1,
|
||||
diff += decode_it(server_diff_noex(pac.apiurl, pac.prjname, pac.name, rev1,
|
||||
pac.prjname, pac.name, rev2,
|
||||
not opts.plain, opts.missingok, opts.meta, not opts.unexpand)
|
||||
not opts.plain, opts.missingok, opts.meta, not opts.unexpand))
|
||||
run_pager(diff)
|
||||
|
||||
|
||||
@ -4135,12 +4141,12 @@ Please submit there instead, or use --nodevelproject to force direct submission.
|
||||
stdin=subprocess.PIPE,
|
||||
stdout=subprocess.PIPE,
|
||||
close_fds=True)
|
||||
p.stdin.write(rdiff.encode())
|
||||
p.stdin.write(rdiff)
|
||||
p.stdin.close()
|
||||
print("".join(x.decode() for x in p.stdout.readlines()))
|
||||
print("".join(decode_it(x) for x in p.stdout.readlines()))
|
||||
elif opts.unified:
|
||||
print()
|
||||
print(rdiff)
|
||||
print(decode_it(rdiff))
|
||||
#run_pager(rdiff)
|
||||
|
||||
def _prdiff_output_matching_requests(self, opts, requests,
|
||||
@ -4285,7 +4291,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
|
||||
else:
|
||||
raise oscerr.WrongArgs('Wrong number of arguments')
|
||||
|
||||
root = ET.fromstring(''.join(show_configuration(apiurl)))
|
||||
root = ET.fromstring(b''.join(show_configuration(apiurl)))
|
||||
elm = root.find('download_url')
|
||||
if elm is None or not elm.text:
|
||||
raise oscerr.APIError('download_url configuration element expected')
|
||||
@ -4530,7 +4536,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
|
||||
# don't exclude packages with state ' ' because the packages
|
||||
# might have modified etc. files
|
||||
prj_excl = [st for st in excl_states if st != ' ']
|
||||
for st, pac in sorted(prj.get_status(*prj_excl), lambda x, y: cmp(x[1], y[1])):
|
||||
for st, pac in sorted(prj.get_status(*prj_excl), key=cmp_to_key(compare)):
|
||||
p = prj.get_pacobj(pac)
|
||||
if p is None:
|
||||
# state is != ' '
|
||||
@ -4541,11 +4547,11 @@ Please submit there instead, or use --nodevelproject to force direct submission.
|
||||
elif st == ' ' and opts.verbose or st != ' ':
|
||||
lines.append(statfrmt(st, os.path.normpath(os.path.join(prj.dir, pac))))
|
||||
states = p.get_status(opts.show_excluded, *excl_states)
|
||||
for st, filename in sorted(states, lambda x, y: cmp(x[1], y[1])):
|
||||
for st, filename in sorted(states, key=cmp_to_key(compare)):
|
||||
lines.append(statfrmt(st, os.path.normpath(os.path.join(p.dir, filename))))
|
||||
else:
|
||||
p = findpacs([arg])[0]
|
||||
for st, filename in sorted(p.get_status(opts.show_excluded, *excl_states), lambda x, y: cmp(x[1], y[1])):
|
||||
for st, filename in sorted(p.get_status(opts.show_excluded, *excl_states), key=cmp_to_key(compare)):
|
||||
lines.append(statfrmt(st, os.path.normpath(os.path.join(p.dir, filename))))
|
||||
if lines:
|
||||
print('\n'.join(lines))
|
||||
@ -5227,7 +5233,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
|
||||
del kwargs['showexcl']
|
||||
for xml in get_package_results(**kwargs):
|
||||
if opts.xml:
|
||||
print(xml, end='')
|
||||
print(decode_it(xml), end='')
|
||||
else:
|
||||
# csv formatting
|
||||
results = [r for r, _ in result_xml_to_dicts(xml)]
|
||||
@ -5290,7 +5296,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
|
||||
kwargs['arch'] = opts.arch
|
||||
kwargs['wait'] = opts.watch
|
||||
for results in get_package_results(apiurl, project, **kwargs):
|
||||
print(results)
|
||||
print(decode_it(results))
|
||||
return
|
||||
|
||||
if opts.watch:
|
||||
@ -5345,7 +5351,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
|
||||
else:
|
||||
raise oscerr.WrongArgs('please provide project package repository arch.')
|
||||
|
||||
print(get_rpmlint_log(apiurl, project, package, repository, arch))
|
||||
print(decode_it(get_rpmlint_log(apiurl, project, package, repository, arch)))
|
||||
|
||||
@cmdln.alias('bl')
|
||||
@cmdln.alias('blt')
|
||||
@ -5791,7 +5797,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
|
||||
|
||||
build_descr_data = None
|
||||
if not build_descr is None:
|
||||
build_descr_data = open(build_descr, 'r').read()
|
||||
build_descr_data = open(build_descr, 'rb').read()
|
||||
if opts.prefer_pkgs and build_descr_data is None:
|
||||
raise oscerr.WrongArgs('error: a build description is needed if \'--prefer-pkgs\' is used')
|
||||
elif opts.prefer_pkgs:
|
||||
@ -5802,13 +5808,13 @@ Please submit there instead, or use --nodevelproject to force direct submission.
|
||||
prefer_pkgs = get_prefer_pkgs(opts.prefer_pkgs, arch,
|
||||
os.path.splitext(build_descr)[1],
|
||||
cpiodata)
|
||||
cpiodata.add(os.path.basename(build_descr), build_descr_data)
|
||||
cpiodata.add(os.path.basename(build_descr.encode()), build_descr_data)
|
||||
build_descr_data = cpiodata.get()
|
||||
|
||||
if opts.multibuild_package:
|
||||
package = package + ":" + opts.multibuild_package
|
||||
|
||||
print(''.join(get_buildinfo(apiurl,
|
||||
print(decode_it(get_buildinfo(apiurl,
|
||||
project, package, repository, arch,
|
||||
specfile=build_descr_data,
|
||||
debug=opts.debug,
|
||||
@ -5855,7 +5861,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
|
||||
else:
|
||||
raise oscerr.WrongArgs('Wrong number of arguments.')
|
||||
|
||||
print(''.join(get_buildconfig(apiurl, project, repository)))
|
||||
print(decode_it(get_buildconfig(apiurl, project, repository)))
|
||||
|
||||
|
||||
def do_workerinfo(self, subcmd, opts, worker):
|
||||
@ -6097,6 +6103,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
|
||||
recipe = recipe.strip()
|
||||
if recipe == 'arch':
|
||||
recipe = 'PKGBUILD'
|
||||
recipe = decode_it(recipe)
|
||||
pac = os.path.basename(os.getcwd())
|
||||
if is_package_dir(os.getcwd()):
|
||||
pac = store_read_package(os.getcwd())
|
||||
@ -7280,7 +7287,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
|
||||
package = pac,
|
||||
target_filename = fname,
|
||||
target_mtime = i.mtime,
|
||||
progress_meter = not opts.quiet)
|
||||
progress_meter = opts.quiet)
|
||||
|
||||
|
||||
@cmdln.option('-b', '--bugowner', action='store_true',
|
||||
@ -7366,7 +7373,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
|
||||
what = {'project': ''}
|
||||
elif type in args_sr:
|
||||
requests = get_request_collection(apiurl, 'creator', req_who=user)
|
||||
for r in sorted(requests):
|
||||
for r in sorted(requests, key=lambda x: x.reqid):
|
||||
print(r.list_view(), '\n')
|
||||
return
|
||||
elif not type in args_pkg:
|
||||
@ -7495,7 +7502,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
|
||||
if list_requests:
|
||||
# old style, only for OBS 2.1 and before. Should not be used, since it is slow and incomplete
|
||||
requests = get_user_projpkgs_request_list(apiurl, user, projpkgs=request_todo)
|
||||
for r in sorted(requests):
|
||||
for r in sorted(requests, key=lambda x: x.reqid):
|
||||
print(r.list_view(), '\n')
|
||||
if not len(requests):
|
||||
print(" -> try also 'osc my sr' to see more.")
|
||||
@ -7734,8 +7741,9 @@ Please submit there instead, or use --nodevelproject to force direct submission.
|
||||
continue
|
||||
# construct a sorted, flat list
|
||||
# Sort by first column, follwed by second column if we have two columns, else sort by first.
|
||||
results.sort(lambda x, y: ( cmp(x[0], y[0]) or
|
||||
(len(x)>1 and len(y)>1 and cmp(x[1], y[1])) ))
|
||||
# results.sort(lambda x, y: ( cmp(x[0], y[0]) or
|
||||
# (len(x)>1 and len(y)>1 and cmp(x[1], y[1])) ))
|
||||
results.sort(key=cmp_to_key(compare))
|
||||
new = []
|
||||
for i in results:
|
||||
new.extend(i)
|
||||
@ -7955,7 +7963,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
|
||||
data=opts.data,
|
||||
file=opts.file,
|
||||
headers=opts.headers)
|
||||
out = r.read()
|
||||
out = decode_it(r.read())
|
||||
|
||||
if opts.edit:
|
||||
text = edit_text(out)
|
||||
@ -7963,7 +7971,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
|
||||
url,
|
||||
data=text,
|
||||
headers=opts.headers)
|
||||
out = r.read()
|
||||
out = decode_it(r.read())
|
||||
|
||||
sys.stdout.write(out)
|
||||
|
||||
@ -8177,7 +8185,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
|
||||
else:
|
||||
if pac:
|
||||
m = show_package_meta(apiurl, prj, pac)
|
||||
metaroot = ET.fromstring(''.join(m))
|
||||
metaroot = ET.fromstring(b''.join(m))
|
||||
if not opts.nodevelproject:
|
||||
while metaroot.findall('devel'):
|
||||
d = metaroot.find('devel')
|
||||
@ -8186,18 +8194,18 @@ Please submit there instead, or use --nodevelproject to force direct submission.
|
||||
if opts.verbose:
|
||||
print("Following to the development space: %s/%s" % (prj, pac))
|
||||
m = show_package_meta(apiurl, prj, pac)
|
||||
metaroot = ET.fromstring(''.join(m))
|
||||
metaroot = ET.fromstring(b''.join(m))
|
||||
if not metaroot.findall('person') and not metaroot.findall('group'):
|
||||
if opts.verbose:
|
||||
print("No dedicated persons in package defined, showing the project persons.")
|
||||
pac = None
|
||||
m = show_project_meta(apiurl, prj)
|
||||
metaroot = ET.fromstring(''.join(m))
|
||||
metaroot = ET.fromstring(b''.join(m))
|
||||
else:
|
||||
# fallback to project lookup for old servers
|
||||
if prj and not searchresult:
|
||||
m = show_project_meta(apiurl, prj)
|
||||
metaroot = ET.fromstring(''.join(m))
|
||||
metaroot = ET.fromstring(b''.join(m))
|
||||
|
||||
# extract the maintainers
|
||||
projects = []
|
||||
@ -8358,10 +8366,13 @@ Please submit there instead, or use --nodevelproject to force direct submission.
|
||||
u = makeurl(apiurl, ['source', project, package, filename], query=query)
|
||||
if subcmd == 'less':
|
||||
f = http_GET(u)
|
||||
run_pager(''.join(f.readlines()))
|
||||
run_pager(b''.join(f.readlines()))
|
||||
else:
|
||||
for data in streamfile(u):
|
||||
sys.stdout.write(data)
|
||||
if isinstance(data, str):
|
||||
sys.stdout.write(data)
|
||||
else:
|
||||
sys.stdout.write(decode_it(data))
|
||||
|
||||
|
||||
# helper function to download a file from a specific revision
|
||||
@ -8457,7 +8468,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
|
||||
u = makeurl(apiurl, ['source', prj, package], query=query)
|
||||
f = http_GET(u)
|
||||
meta = f.readlines()
|
||||
root_new = ET.fromstring(''.join(meta))
|
||||
root_new = ET.fromstring(b''.join(meta))
|
||||
dir_new = { 'apiurl': apiurl, 'project': prj, 'package': package }
|
||||
dir_new['srcmd5'] = root_new.get('srcmd5')
|
||||
dir_new['entries'] = [[n.get('name'), n.get('md5')] for n in root_new.findall('entry')]
|
||||
@ -8501,7 +8512,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
|
||||
os.mkdir(destdir)
|
||||
|
||||
Package.init_package(apiurl, target_prj, target_package, destdir)
|
||||
store_write_string(destdir, '_files', ''.join(meta) + '\n')
|
||||
store_write_string(destdir, '_files', b''.join(meta) + b'\n')
|
||||
store_write_string(destdir, '_linkrepair', '')
|
||||
pac = Package(destdir)
|
||||
|
||||
@ -8610,7 +8621,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
|
||||
u = makeurl(p.apiurl, ['source', p.prjname, p.name], query=query)
|
||||
f = http_GET(u)
|
||||
meta = f.readlines()
|
||||
root_new = ET.fromstring(''.join(meta))
|
||||
root_new = ET.fromstring(b''.join(meta))
|
||||
linkinfo_new = root_new.find('linkinfo')
|
||||
if linkinfo_new == None:
|
||||
raise oscerr.APIError('link is not a really a link?')
|
||||
@ -8783,7 +8794,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
|
||||
buf = f.read(16384)
|
||||
if not buf:
|
||||
break
|
||||
sys.stdout.write(buf)
|
||||
sys.stdout.write(decode_it(buf))
|
||||
|
||||
@cmdln.option('-m', '--message',
|
||||
help='add MESSAGE to changes (do not open an editor)')
|
||||
@ -8823,7 +8834,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
|
||||
import glob, re
|
||||
try:
|
||||
fn_changelog = glob.glob('*.changes')[0]
|
||||
fp = file(fn_changelog)
|
||||
fp = open(fn_changelog)
|
||||
titleline = fp.readline()
|
||||
fp.close()
|
||||
if re.match('^\*\W+(.+\W+\d{1,2}\W+20\d{2})\W+(.+)\W+<(.+)>\W+(.+)$', titleline):
|
||||
|
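Many of the commandline.py changes above follow one pattern: the API helpers now return bytes chunks, so XML is parsed from a b''.join() and output goes through decode_it() only when written to stdout. A small illustrative sketch (the sample XML is made up):

    import sys
    from xml.etree import ElementTree as ET
    from osc.util.helper import decode_it

    chunks = [b'<package name="osc">', b'<title/>', b'</package>']
    root = ET.fromstring(b''.join(chunks))         # parse the raw bytes
    sys.stdout.write(decode_it(b''.join(chunks)))  # decode only for display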
osc/core.py (154 changes)
@ -24,6 +24,7 @@ import errno
|
||||
import shlex
|
||||
import hashlib
|
||||
|
||||
|
||||
try:
|
||||
from urllib.parse import urlsplit, urlunsplit, urlparse, quote_plus, urlencode, unquote
|
||||
from urllib.error import HTTPError
|
||||
@ -49,6 +50,13 @@ except ImportError:
|
||||
from . import oscerr
|
||||
from . import conf
|
||||
|
||||
try:
|
||||
from functools import cmp_to_key
|
||||
except ImportError:
|
||||
from .util.helper import cmp_to_key
|
||||
|
||||
from osc.util.helper import decode_list, decode_it
|
||||
|
||||
try:
|
||||
# python 2.6 and python 2.7
|
||||
unicode
|
||||
@ -62,6 +70,11 @@ except:
|
||||
unicode = lambda x, *args: x
|
||||
ET_ENCODING = "unicode"
|
||||
|
||||
def compare(a, b): return cmp(a[1:], b[1:])
|
||||
|
||||
def cmp(a, b):
|
||||
return (a > b) - (a < b)
|
||||
|
||||
DISTURL_RE = re.compile(r"^(?P<bs>.*)://(?P<apiurl>.*?)/(?P<project>.*?)/(?P<repository>.*?)/(?P<revision>.*)-(?P<source>.*)$")
|
||||
BUILDLOGURL_RE = re.compile(r"^(?P<apiurl>https?://.*?)/build/(?P<project>.*?)/(?P<repository>.*?)/(?P<arch>.*?)/(?P<package>.*?)/_log$")
|
||||
BUFSIZE = 1024*1024
|
||||
@ -331,7 +344,7 @@ class Serviceinfo:
|
||||
def addVerifyFile(self, serviceinfo_node, filename):
|
||||
import hashlib
|
||||
|
||||
f = open(filename, 'r')
|
||||
f = open(filename, 'rb')
|
||||
digest = hashlib.sha256(f.read()).hexdigest()
|
||||
f.close()
|
||||
|
||||
@ -1849,8 +1862,8 @@ class Package:
|
||||
meta = show_package_meta(self.apiurl, self.prjname, self.name)
|
||||
if meta != "":
|
||||
# is empty for _project for example
|
||||
meta = ''.join(meta)
|
||||
store_write_string(self.absdir, '_meta', meta + '\n')
|
||||
meta = b''.join(meta)
|
||||
store_write_string(self.absdir, '_meta', meta + b'\n')
|
||||
|
||||
def findfilebyname(self, n):
|
||||
for i in self.filelist:
|
||||
@ -2115,7 +2128,7 @@ rev: %s
|
||||
argument force supress the confirm question
|
||||
"""
|
||||
|
||||
m = ''.join(show_package_meta(self.apiurl, self.prjname, self.name))
|
||||
m = b''.join(show_package_meta(self.apiurl, self.prjname, self.name))
|
||||
|
||||
root = ET.fromstring(m)
|
||||
root.find('title').text = self.summary
|
||||
@ -2130,7 +2143,7 @@ rev: %s
|
||||
|
||||
if not force:
|
||||
print('*' * 36, 'old', '*' * 36)
|
||||
print(m)
|
||||
print(decode_it(m))
|
||||
print('*' * 36, 'new', '*' * 36)
|
||||
print(ET.tostring(root, encoding=ET_ENCODING))
|
||||
print('*' * 72)
|
||||
@ -3565,7 +3578,7 @@ def show_attribute_meta(apiurl, prj, pac, subpac, attribute, with_defaults, with
|
||||
|
||||
def show_devel_project(apiurl, prj, pac):
|
||||
m = show_package_meta(apiurl, prj, pac)
|
||||
node = ET.fromstring(''.join(m)).find('devel')
|
||||
node = ET.fromstring(b''.join(m)).find('devel')
|
||||
if node is None:
|
||||
return None, None
|
||||
else:
|
||||
@ -3574,7 +3587,7 @@ def show_devel_project(apiurl, prj, pac):
|
||||
|
||||
def set_devel_project(apiurl, prj, pac, devprj=None, devpac=None):
|
||||
meta = show_package_meta(apiurl, prj, pac)
|
||||
root = ET.fromstring(''.join(meta))
|
||||
root = ET.fromstring(b''.join(meta))
|
||||
node = root.find('devel')
|
||||
if node is None:
|
||||
if devprj is None:
|
||||
@ -3647,8 +3660,12 @@ class metafile:
|
||||
self.url = url
|
||||
self.change_is_required = change_is_required
|
||||
(fd, self.filename) = tempfile.mkstemp(prefix = 'osc_metafile.', suffix = file_ext)
|
||||
f = os.fdopen(fd, 'w')
|
||||
f.write(''.join(input))
|
||||
if not input or isinstance(input[0], str) or isinstance(input, str):
|
||||
f = os.fdopen(fd, 'w')
|
||||
f.write(''.join(input))
|
||||
else:
|
||||
f = os.fdopen(fd, 'wb')
|
||||
f.write(b''.join(input))
|
||||
f.close()
|
||||
self.hash_orig = dgst(self.filename)
|
||||
|
||||
@ -3681,8 +3698,8 @@ class metafile:
|
||||
# examine the error - we can't raise an exception because we might want
|
||||
# to try again
|
||||
data = e.read()
|
||||
if '<summary>' in data:
|
||||
print(data.split('<summary>')[1].split('</summary>')[0], file=sys.stderr)
|
||||
if b'<summary>' in data:
|
||||
print(data.split(b'<summary>')[1].split(b'</summary>')[0], file=sys.stderr)
|
||||
ri = raw_input('Try again? ([y/N]): ')
|
||||
if ri not in ['y', 'Y']:
|
||||
break
|
||||
@ -3799,7 +3816,20 @@ def edit_meta(metatype,
|
||||
if metatype == 'pkg':
|
||||
# check if the package is a link to a different project
|
||||
project, package = path_args
|
||||
orgprj = ET.fromstring(''.join(data)).get('project')
|
||||
# data can be a bytes object, a list with strings, a list with bytes, just a string.
|
||||
# So we need the following even if it is ugly.
|
||||
if sys.version_info >= (3, 0):
|
||||
if isinstance(data, bytes):
|
||||
data = decode_it(data)
|
||||
orgprj = ET.fromstring(''.join(data)).get('project')
|
||||
elif isinstance(data, list):
|
||||
decode_data = decode_list(data)
|
||||
orgprj = ET.fromstring(''.join(decode_data)).get('project')
|
||||
else:
|
||||
orgprj = ET.fromstring(''.join(data)).get('project')
|
||||
else:
|
||||
orgprj = ET.fromstring(''.join(data)).get('project')
|
||||
|
||||
if orgprj is not None and unquote(project) != orgprj:
|
||||
print('The package is linked from a different project.')
|
||||
print('If you want to edit the meta of the package create first a branch.')
|
||||
@ -3839,7 +3869,7 @@ def show_files_meta(apiurl, prj, pac, revision=None, expand=False, linkrev=None,
|
||||
|
||||
def show_upstream_srcmd5(apiurl, prj, pac, expand=False, revision=None, meta=False, include_service_files=False, deleted=False):
|
||||
m = show_files_meta(apiurl, prj, pac, expand=expand, revision=revision, meta=meta, deleted=deleted)
|
||||
et = ET.fromstring(''.join(m))
|
||||
et = ET.fromstring(m)
|
||||
if include_service_files:
|
||||
try:
|
||||
sinfo = et.find('serviceinfo')
|
||||
@ -3852,7 +3882,7 @@ def show_upstream_srcmd5(apiurl, prj, pac, expand=False, revision=None, meta=Fal
|
||||
|
||||
def show_upstream_xsrcmd5(apiurl, prj, pac, revision=None, linkrev=None, linkrepair=False, meta=False, include_service_files=False):
|
||||
m = show_files_meta(apiurl, prj, pac, revision=revision, linkrev=linkrev, linkrepair=linkrepair, meta=meta, expand=include_service_files)
|
||||
et = ET.fromstring(''.join(m))
|
||||
et = ET.fromstring(m)
|
||||
if include_service_files:
|
||||
return et.get('srcmd5')
|
||||
|
||||
@ -3891,7 +3921,7 @@ def get_project_sourceinfo(apiurl, project, nofilename, *packages):
|
||||
raise
|
||||
if len(packages) == 1:
|
||||
raise oscerr.APIError('package name too long: %s' % packages[0])
|
||||
n = len(packages) / 2
|
||||
n = int(len(packages) / 2)
|
||||
pkgs = packages[:n]
|
||||
res = get_project_sourceinfo(apiurl, project, nofilename, *pkgs)
|
||||
pkgs = packages[n:]
|
||||
@ -3906,12 +3936,12 @@ def get_project_sourceinfo(apiurl, project, nofilename, *packages):
|
||||
|
||||
def show_upstream_rev_vrev(apiurl, prj, pac, revision=None, expand=False, meta=False):
|
||||
m = show_files_meta(apiurl, prj, pac, revision=revision, expand=expand, meta=meta)
|
||||
et = ET.fromstring(''.join(m))
|
||||
et = ET.fromstring(m)
|
||||
return et.get('rev'), et.get('vrev')
|
||||
|
||||
def show_upstream_rev(apiurl, prj, pac, revision=None, expand=False, linkrev=None, meta=False, include_service_files=False):
|
||||
m = show_files_meta(apiurl, prj, pac, revision=revision, expand=expand, linkrev=linkrev, meta=meta)
|
||||
et = ET.fromstring(''.join(m))
|
||||
et = ET.fromstring(m)
|
||||
if include_service_files:
|
||||
try:
|
||||
sinfo = et.find('serviceinfo')
|
||||
@ -4012,10 +4042,16 @@ def run_pager(message, tmp_suffix=''):
|
||||
return
|
||||
|
||||
if not sys.stdout.isatty():
|
||||
print(message)
|
||||
if isinstance(message, str):
|
||||
print(message)
|
||||
else:
|
||||
print(decode_it(message))
|
||||
else:
|
||||
tmpfile = tempfile.NamedTemporaryFile(suffix=tmp_suffix)
|
||||
tmpfile.write(message)
|
||||
if isinstance(message, str):
|
||||
tmpfile.write(bytes(message, 'utf-8'))
|
||||
else:
|
||||
tmpfile.write(message)
|
||||
tmpfile.flush()
|
||||
pager = os.getenv('PAGER', default=get_default_pager())
|
||||
cmd = shlex.split(pager) + [tmpfile.name]
|
||||
@ -4042,6 +4078,8 @@ def _edit_message_open_editor(filename, data, orig_mtime):
|
||||
import tempfile
|
||||
editor = _editor_command()
|
||||
mtime = os.stat(filename).st_mtime
|
||||
if isinstance(data, str):
|
||||
data = bytes(data, 'utf-8')
|
||||
if mtime == orig_mtime:
|
||||
# prepare file for editors
|
||||
if editor[0] in ('vi', 'vim'):
|
||||
@ -4051,7 +4089,7 @@ def _edit_message_open_editor(filename, data, orig_mtime):
|
||||
editor.extend(['-c', ':r %s' % f.name, filename])
|
||||
run_external(editor[0], *editor[1:])
|
||||
else:
|
||||
with open(filename, 'w') as f:
|
||||
with open(filename, 'wb') as f:
|
||||
f.write(data)
|
||||
orig_mtime = os.stat(filename).st_mtime
|
||||
run_editor(filename)
|
||||
@ -4543,7 +4581,7 @@ def get_group_meta(apiurl, group):
|
||||
u = makeurl(apiurl, ['group', quote_plus(group)])
|
||||
try:
|
||||
f = http_GET(u)
|
||||
return ''.join(f.readlines())
|
||||
return b''.join(f.readlines())
|
||||
except HTTPError:
|
||||
print('group \'%s\' not found' % group)
|
||||
return None
|
||||
@ -4552,7 +4590,7 @@ def get_user_meta(apiurl, user):
|
||||
u = makeurl(apiurl, ['person', quote_plus(user)])
|
||||
try:
|
||||
f = http_GET(u)
|
||||
return ''.join(f.readlines())
|
||||
return b''.join(f.readlines())
|
||||
except HTTPError:
|
||||
print('user \'%s\' not found' % user)
|
||||
return None
|
||||
@ -4594,7 +4632,10 @@ def download(url, filename, progress_obj = None, mtime = None):
|
||||
try:
|
||||
o = os.fdopen(fd, 'wb')
|
||||
for buf in streamfile(url, http_GET, BUFSIZE, progress_obj=progress_obj):
|
||||
o.write(bytes(buf, "utf-8"))
|
||||
if isinstance(buf, str):
|
||||
o.write(bytes(buf, "utf-8"))
|
||||
else:
|
||||
o.write(buf)
|
||||
o.close()
|
||||
os.rename(tmpfile, filename)
|
||||
except:
|
||||
@ -4807,7 +4848,7 @@ def server_diff_noex(apiurl,
|
||||
msg = None
|
||||
body = None
|
||||
try:
|
||||
body = e.read()
|
||||
body = decode_it(e.read())
|
||||
if not 'bad link' in body:
|
||||
return '# diff failed: ' + body
|
||||
except:
|
||||
@ -5027,7 +5068,7 @@ def replace_pkg_meta(pkgmeta, new_name, new_prj, keep_maintainers = False,
|
||||
only maintainer (unless keep_maintainers is set). Additionally remove the
|
||||
develproject entry (<devel />) unless keep_develproject is true.
|
||||
"""
|
||||
root = ET.fromstring(''.join(pkgmeta))
|
||||
root = ET.fromstring(b''.join(pkgmeta))
|
||||
root.set('name', new_name)
|
||||
root.set('project', new_prj)
|
||||
# never take releasename, it needs to be explicit
|
||||
@ -5512,7 +5553,7 @@ def get_distibutions(apiurl, discon=False):
|
||||
else:
|
||||
result_line_templ = '%(name)-25s %(project)-25s %(repository)-25s %(reponame)s'
|
||||
f = http_GET(makeurl(apiurl, ['distributions']))
|
||||
root = ET.fromstring(''.join(f))
|
||||
root = ET.fromstring(b''.join(f))
|
||||
|
||||
for node in root.findall('distribution'):
|
||||
rmap = {}
|
||||
@ -5538,7 +5579,7 @@ def get_platforms_of_project(apiurl, prj):
|
||||
|
||||
def get_repositories_of_project(apiurl, prj):
|
||||
f = show_project_meta(apiurl, prj)
|
||||
root = ET.fromstring(''.join(f))
|
||||
root = ET.fromstring(b''.join(f))
|
||||
|
||||
r = [ node.get('name') for node in root.findall('repository')]
|
||||
return r
|
||||
@ -5580,7 +5621,7 @@ class Repo:
|
||||
|
||||
def get_repos_of_project(apiurl, prj):
|
||||
f = show_project_meta(apiurl, prj)
|
||||
root = ET.fromstring(''.join(f))
|
||||
root = ET.fromstring(b''.join(f))
|
||||
|
||||
for node in root.findall('repository'):
|
||||
for node2 in node.findall('arch'):
|
||||
@ -5751,7 +5792,7 @@ def get_package_results(apiurl, project, package=None, wait=False, *args, **kwar
|
||||
while True:
|
||||
waiting = False
|
||||
try:
|
||||
xml = ''.join(show_results_meta(apiurl, project, package, *args, **kwargs))
|
||||
xml = b''.join(show_results_meta(apiurl, project, package, *args, **kwargs))
|
||||
except HTTPError as e:
|
||||
# check for simple timeout error and fetch again
|
||||
if e.code == 502 or e.code == 504:
|
||||
@ -5792,7 +5833,7 @@ def get_prj_results(apiurl, prj, hide_legend=False, csv=False, status_filter=Non
|
||||
r = []
|
||||
|
||||
f = show_prj_results_meta(apiurl, prj)
|
||||
root = ET.fromstring(''.join(f))
|
||||
root = ET.fromstring(b''.join(f))
|
||||
|
||||
pacs = []
|
||||
# sequence of (repo,arch) tuples
|
||||
@ -5972,7 +6013,6 @@ def get_prj_results(apiurl, prj, hide_legend=False, csv=False, status_filter=Non
|
||||
return r
|
||||
|
||||
|
||||
|
||||
def streamfile(url, http_meth = http_GET, bufsize=8192, data=None, progress_obj=None, text=None):
|
||||
"""
|
||||
performs http_meth on url and read bufsize bytes from the response
|
||||
@ -6032,8 +6072,12 @@ def streamfile(url, http_meth = http_GET, bufsize=8192, data=None, progress_obj=
|
||||
|
||||
def buildlog_strip_time(data):
|
||||
"""Strips the leading build time from the log"""
|
||||
time_regex = re.compile('^\[[^\]]*\] ', re.M)
|
||||
return time_regex.sub('', data)
|
||||
if isinstance(data, str):
|
||||
time_regex = re.compile('^\[[^\]]*\] ', re.M)
|
||||
return time_regex.sub('', data)
|
||||
else:
|
||||
time_regex = re.compile(b'^\[[^\]]*\] ', re.M)
|
||||
return time_regex.sub(b'', data)
|
||||
|
||||
|
||||
def print_buildlog(apiurl, prj, package, repository, arch, offset=0, strip_time=False, last=False):
|
||||
@ -6042,11 +6086,14 @@ def print_buildlog(apiurl, prj, package, repository, arch, offset=0, strip_time=
|
||||
def print_data(data, strip_time=False):
|
||||
if strip_time:
|
||||
data = buildlog_strip_time(data)
|
||||
sys.stdout.write(data.translate(all_bytes, remove_bytes))
|
||||
sys.stdout.write(decode_it(data.translate(all_bytes, remove_bytes)))
|
||||
|
||||
# to protect us against control characters
|
||||
import string
|
||||
all_bytes = string.maketrans('', '')
|
||||
if sys.version_info >= (3, 0):
|
||||
all_bytes = bytes.maketrans(b'', b'')
|
||||
else:
|
||||
all_bytes = string.maketrans(b'', b'')
|
||||
remove_bytes = all_bytes[:8] + all_bytes[14:32] # accept tabs and newlines
|
||||
|
||||
query = {'nostream' : '1', 'start' : '%s' % offset}
|
||||
@ -6058,7 +6105,7 @@ def print_buildlog(apiurl, prj, package, repository, arch, offset=0, strip_time=
|
||||
start_offset = offset
|
||||
u = makeurl(apiurl, ['build', prj, repository, arch, package, '_log'], query=query)
|
||||
try:
|
||||
for data in streamfile(u, bufsize="line"):
|
||||
for data in streamfile(u):
|
||||
offset += len(data)
|
||||
print_data(data, strip_time)
|
||||
except IncompleteRead as e:
|
||||
@ -6119,7 +6166,7 @@ def get_worker_info(apiurl, worker):
|
||||
u = makeurl(apiurl, ['worker', worker])
|
||||
f = http_GET(u)
|
||||
|
||||
return f.read()
|
||||
return decode_it(f.read())
|
||||
|
||||
|
||||
def check_constraints(apiurl, prj, repository, arch, package, constraintsfile=None):
|
||||
@ -6130,7 +6177,7 @@ def check_constraints(apiurl, prj, repository, arch, package, constraintsfile=No
|
||||
query['arch'] = arch
|
||||
u = makeurl(apiurl, ['worker'], query)
|
||||
f = http_POST(u, data=constraintsfile)
|
||||
root = ET.fromstring(''.join(f))
|
||||
root = ET.fromstring(b''.join(f))
|
||||
return [node.get('name') for node in root.findall('entry')]
|
||||
|
||||
|
||||
@ -6271,7 +6318,7 @@ def get_commitlog(apiurl, prj, package, revision, format = 'text', meta = False,
|
||||
try:
|
||||
comment = node.find('comment').text.encode(locale.getpreferredencoding(), 'replace')
|
||||
except:
|
||||
comment = '<no message>'
|
||||
comment = b'<no message>'
|
||||
try:
|
||||
requestid = node.find('requestid').text.encode(locale.getpreferredencoding(), 'replace')
|
||||
except:
|
||||
@ -6294,10 +6341,10 @@ def get_commitlog(apiurl, prj, package, revision, format = 'text', meta = False,
|
||||
r.append('</logentry>')
|
||||
else:
|
||||
if requestid:
|
||||
requestid = "rq" + requestid
|
||||
requestid = decode_it((b"rq" + requestid))
|
||||
s = '-' * 76 + \
|
||||
'\nr%s | %s | %s | %s | %s | %s\n' % (rev, user, t, srcmd5, version, requestid) + \
|
||||
'\n' + comment
|
||||
'\n' + decode_it(comment)
|
||||
r.append(s)
|
||||
|
||||
if format not in ['csv', 'xml']:
|
||||
@ -6431,6 +6478,8 @@ def store_write_string(dir, file, string, subdir=''):
|
||||
fname = os.path.join(dir, store, subdir, file)
|
||||
try:
|
||||
f = open(fname + '.new', 'w')
|
||||
if not isinstance(string, str):
|
||||
string = decode_it(string)
|
||||
f.write(string)
|
||||
f.close()
|
||||
os.rename(fname + '.new', fname)
|
||||
@ -6809,7 +6858,11 @@ def is_rpm(f):
|
||||
except:
|
||||
return False
|
||||
|
||||
if h == '\xed\xab\xee\xdb':
|
||||
if isinstance(h, str):
|
||||
isrpmstr = '\xed\xab\xee\xdb'
|
||||
else:
|
||||
isrpmstr = b'\xed\xab\xee\xdb'
|
||||
if h == isrpmstr:
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
@ -6825,7 +6878,8 @@ def is_srcrpm(f):
|
||||
except:
|
||||
return False
|
||||
|
||||
if h[7] == '\x01':
|
||||
issrcrpm = bytes(bytearray([h[7]])).decode('utf-8')
|
||||
if issrcrpm == '\x01':
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
@ -6847,7 +6901,7 @@ def addPerson(apiurl, prj, pac, user, role="maintainer"):
|
||||
create_new=False)
|
||||
|
||||
if data and get_user_meta(apiurl, user) != None:
|
||||
root = ET.fromstring(''.join(data))
|
||||
root = ET.fromstring(b''.join(data))
|
||||
found = False
|
||||
for person in root.getiterator('person'):
|
||||
if person.get('userid') == user and person.get('role') == role:
|
||||
@ -7025,7 +7079,7 @@ def addDownloadUrlService(url):
|
||||
|
||||
# for pretty output
|
||||
xmlindent(s)
|
||||
f = open(service_file, 'wb')
|
||||
f = open(service_file, 'w')
|
||||
f.write(ET.tostring(s, encoding=ET_ENCODING))
|
||||
f.close()
|
||||
if addfile:
|
||||
@ -7047,7 +7101,7 @@ def addDownloadUrlService(url):
|
||||
|
||||
# for pretty output
|
||||
xmlindent(s)
|
||||
f = open(service_file, 'wb')
|
||||
f = open(service_file, 'w')
|
||||
f.write(ET.tostring(s, encoding=ET_ENCODING))
|
||||
f.close()
|
||||
|
||||
@ -7208,7 +7262,7 @@ def get_commit_msg(wc_dir, pacs):
|
||||
footer = []
|
||||
lines = []
|
||||
for p in pacs:
|
||||
states = sorted(p.get_status(False, ' ', '?'), lambda x, y: cmp(x[1], y[1]))
|
||||
states = sorted(p.get_status(False, ' ', '?'), key=cmp_to_key(compare))
|
||||
changed = [statfrmt(st, os.path.normpath(os.path.join(p.dir, filename))) for st, filename in states]
|
||||
if changed:
|
||||
footer += changed
|
||||
@ -7287,7 +7341,7 @@ def request_interactive_review(apiurl, request, initial_cmd='', group=None,
|
||||
except (ValueError, IndexError):
|
||||
print('Invalid rpmlintlog index. Please choose between 0 and %i' % (len(lintlogs)-1))
|
||||
try:
|
||||
print(get_rpmlint_log(apiurl, **lintlogs[lint_n]))
|
||||
print(decode_it(get_rpmlint_log(apiurl, **lintlogs[lint_n])))
|
||||
except HTTPError as e:
|
||||
if e.code == 404:
|
||||
print('No rpmlintlog for %s %s' % (lintlogs[lint_n]['repo'],
|
||||
@ -7363,12 +7417,12 @@ def request_interactive_review(apiurl, request, initial_cmd='', group=None,
|
||||
tmpfile.close()
|
||||
tmpfile = None
|
||||
if tmpfile is None:
|
||||
tmpfile = tempfile.NamedTemporaryFile(suffix='.diff')
|
||||
tmpfile = tempfile.NamedTemporaryFile(suffix='.diff', mode='r+')
|
||||
tmpfile.write(req_summary)
|
||||
tmpfile.write(issues)
|
||||
try:
|
||||
diff = request_diff(apiurl, request.reqid)
|
||||
tmpfile.write(diff)
|
||||
tmpfile.write(decode_it(diff))
|
||||
except HTTPError as e:
|
||||
if e.code != 400:
|
||||
raise
|
||||
|
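The edit_meta() hunk above has to cope with `data` arriving as bytes, str, a list of str or a list of bytes. A compact sketch of that normalisation, assuming the same helper module (the parse_meta name is illustrative):

    from xml.etree import ElementTree as ET
    from osc.util.helper import decode_it, decode_list

    def parse_meta(data):
        if isinstance(data, bytes):
            data = decode_it(data)
        elif isinstance(data, (list, tuple)):
            data = ''.join(decode_list(list(data)))
        return ET.fromstring(data)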
osc/oscssl.py
@@ -5,9 +5,8 @@
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import M2Crypto.httpslib
|
||||
from M2Crypto.SSL.Checker import SSLVerificationError
|
||||
from M2Crypto import m2, SSL
|
||||
from M2Crypto import m2, SSL, httpslib
|
||||
import M2Crypto.m2urllib2
|
||||
import socket
|
||||
import sys
|
||||
@ -185,22 +184,28 @@ class myHTTPSHandler(M2Crypto.m2urllib2.HTTPSHandler):
|
||||
# "do_open()" and "https_open()" so that we just need to override
|
||||
# the small "https_open()" method...)
|
||||
def https_open(self, req):
|
||||
host = req.get_host()
|
||||
# https://docs.python.org/3.3/library/urllib.request.html#urllib.request.Request.get_host
|
||||
try: # up to python-3.2
|
||||
host = req.get_host()
|
||||
except AttributeError: # from python-3.3
|
||||
host = req.host
|
||||
if not host:
|
||||
raise M2Crypto.m2urllib2.URLError('no host given: ' + req.get_full_url())
|
||||
raise M2Crypto.m2urllib2.URLError('no host given')
|
||||
|
||||
# Our change: Check to see if we're using a proxy.
|
||||
# Then create an appropriate ssl-aware connection.
|
||||
full_url = req.get_full_url()
|
||||
target_host = urlparse(full_url)[1]
|
||||
|
||||
if (target_host != host):
|
||||
h = myProxyHTTPSConnection(host = host, appname = self.appname, ssl_context = self.ctx)
|
||||
# M2Crypto.ProxyHTTPSConnection.putrequest expects a fullurl
|
||||
selector = full_url
|
||||
if target_host != host:
|
||||
request_uri = urldefrag(full_url)[0]
|
||||
h = httpslib.ProxyHTTPSConnection(host=host, ssl_context=self.ctx)
|
||||
else:
|
||||
h = myHTTPSConnection(host = host, appname = self.appname, ssl_context = self.ctx)
|
||||
selector = req.get_selector()
|
||||
try: # up to python-3.2
|
||||
request_uri = req.get_selector()
|
||||
except AttributeError: # from python-3.3
|
||||
request_uri = req.selector
|
||||
h = httpslib.HTTPSConnection(host=host, ssl_context=self.ctx)
|
||||
# End our change
|
||||
h.set_debuglevel(self._debuglevel)
|
||||
|
||||
@ -214,10 +219,9 @@ class myHTTPSHandler(M2Crypto.m2urllib2.HTTPSHandler):
|
||||
# request.
|
||||
headers["Connection"] = "close"
|
||||
try:
|
||||
h.request(req.get_method(), selector, req.data, headers)
|
||||
h.request(req.get_method(), request_uri, req.data, headers)
|
||||
r = h.getresponse()
|
||||
except socket.error as err: # XXX what error?
|
||||
err.filename = full_url
|
||||
except socket.error as err: # XXX what error?
|
||||
raise M2Crypto.m2urllib2.URLError(err)
|
||||
|
||||
# Pick apart the HTTPResponse object to get the addinfourl
|
||||
@ -227,18 +231,26 @@ class myHTTPSHandler(M2Crypto.m2urllib2.HTTPSHandler):
|
||||
# for Windows. That adapter calls recv(), so delegate recv()
|
||||
# to read(). This weird wrapping allows the returned object to
|
||||
# have readline() and readlines() methods.
|
||||
|
||||
# XXX It might be better to extract the read buffering code
|
||||
# out of socket._fileobject() and into a base class.
|
||||
|
||||
r.recv = r.read
|
||||
fp = socket._fileobject(r)
|
||||
if (sys.version_info < (3, 0)):
|
||||
fp = socket._fileobject(r, close=True)
|
||||
else:
|
||||
r._decref_socketios = lambda: None
|
||||
r.ssl = h.sock.ssl
|
||||
r._timeout = -1.0
|
||||
# hack to bypass python3 bug with 0 buffer size and
|
||||
# http/client.py readinto method for response class
|
||||
if r.length is not None and r.length == 0:
|
||||
r.readinto = lambda b: 0
|
||||
r.recv_into = r.readinto
|
||||
fp = socket.SocketIO(r, 'rb')
|
||||
|
||||
resp = addinfourl(fp, r.msg, req.get_full_url())
|
||||
resp.code = r.status
|
||||
resp.msg = r.reason
|
||||
return resp
|
||||
|
||||
|
||||
class myHTTPSConnection(M2Crypto.httpslib.HTTPSConnection):
|
||||
def __init__(self, *args, **kwargs):
|
||||
self.appname = kwargs.pop('appname', 'generic')
|
||||
|
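The https_open() change above probes the old urllib2-style accessors first and falls back to the attributes that Request objects expose since Python 3.3. A hedged compat sketch of that probe (an illustrative helper, not part of osc):

    def request_host_and_uri(req):
        try:                      # up to python-3.2 / urllib2
            host = req.get_host()
            uri = req.get_selector()
        except AttributeError:    # from python-3.3
            host = req.host
            uri = req.selector
        return host, uri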
osc/util/helper.py (new file, 65 lines)
@@ -0,0 +1,65 @@
# Copyright (C) 2018 SUSE Linux. All rights reserved.
# This program is free software; it may be used, copied, modified
# and distributed under the terms of the GNU General Public Licence,
# either version 2, or (at your option) any later version.


def cmp_to_key(mycmp):
    """ Converts a cmp= function into a key= function.
    """

    class K(object):
        def __init__(self, obj, *args):
            self.obj = obj

        def __lt__(self, other):
            return mycmp(self.obj, other.obj) < 0

        def __gt__(self, other):
            return mycmp(self.obj, other.obj) > 0

        def __eq__(self, other):
            return mycmp(self.obj, other.obj) == 0

        def __le__(self, other):
            return mycmp(self.obj, other.obj) <= 0

        def __ge__(self, other):
            return mycmp(self.obj, other.obj) >= 0

        def __ne__(self, other):
            return mycmp(self.obj, other.obj) != 0

        def __hash__(self):
            raise TypeError('hash not implemented')

    return K


def decode_list(ilist):
    """ Decodes the elements of a list if needed
    """

    dlist = []
    for elem in ilist:
        if not isinstance(elem, str):
            dlist.append(decode_it(elem))
        else:
            dlist.append(elem)
    return dlist


def decode_it(obj):
    """ Decodes the given object if obj is not a string
        based on the chardet module if possible
    """

    if isinstance(obj, str):
        return obj
    else:
        try:
            import chardet
            return obj.decode(chardet.detect(obj)['encoding'])
        except:
            import locale
            return obj.decode(locale.getlocale()[1])
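The fallback cmp_to_key() above mirrors functools.cmp_to_key and is what the rest of this commit plugs into sorted() in place of the removed cmp= keyword. A short usage sketch with the same compare() comparator as osc/core.py (the sample list is made up):

    try:
        from functools import cmp_to_key
    except ImportError:
        from osc.util.helper import cmp_to_key

    def compare(a, b):
        # old-style comparator: negative / zero / positive
        return (a[1:] > b[1:]) - (a[1:] < b[1:])

    states = [('M', 'bar.spec'), ('A', 'foo.changes')]
    print(sorted(states, key=cmp_to_key(compare)))   # sorted by filename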
osc/util/repodata.py
@@ -177,13 +177,13 @@ class RepoDataQueryResult(osc.util.packagequery.PackageQueryResult):
return None

def vercmp(self, other):
res = osc.util.rpmquery.RpmQuery.rpmvercmp(str(self.epoch()), str(other.epoch()))
res = osc.util.rpmquery.RpmQuery.rpmvercmp(str(self.epoch()).encode(), str(other.epoch()).encode())
if res != 0:
return res
res = osc.util.rpmquery.RpmQuery.rpmvercmp(self.version(), other.version())
res = osc.util.rpmquery.RpmQuery.rpmvercmp(self.version().encode(), other.version().encode())
if res != 0:
return res
res = osc.util.rpmquery.RpmQuery.rpmvercmp(self.release(), other.release())
res = osc.util.rpmquery.RpmQuery.rpmvercmp(self.release().encode(), other.release().encode())
return res

def version(self):
osc/util/rpmquery.py
@@ -5,6 +5,10 @@ import os
|
||||
import re
|
||||
import struct
|
||||
from . import packagequery
|
||||
from osc.util.helper import decode_it
|
||||
|
||||
def cmp(a, b):
|
||||
return (a > b) - (a < b)
|
||||
|
||||
class RpmError(packagequery.PackageError):
|
||||
pass
|
||||
@ -184,14 +188,14 @@ class RpmQuery(packagequery.PackageQuery, packagequery.PackageQueryResult):
|
||||
continue
|
||||
# RPMSENSE_SENSEMASK = 15 (see rpmlib.h) but ignore RPMSENSE_SERIAL (= 1 << 0) therefore use 14
|
||||
if flags & 14:
|
||||
name += ' '
|
||||
name += b' '
|
||||
if flags & self.GREATER:
|
||||
name += '>'
|
||||
name += b'>'
|
||||
elif flags & self.LESS:
|
||||
name += '<'
|
||||
name += b'<'
|
||||
if flags & self.EQUAL:
|
||||
name += '='
|
||||
name += ' %s' % ver
|
||||
name += b'='
|
||||
name += b' %s' % ver
|
||||
res.append(name)
|
||||
return res
|
||||
|
||||
@ -288,7 +292,7 @@ class RpmQuery(packagequery.PackageQuery, packagequery.PackageQueryResult):
|
||||
arch = 'src'
|
||||
else:
|
||||
arch = self.arch()
|
||||
return RpmQuery.filename(self.name(), None, self.version(), self.release(), arch)
|
||||
return RpmQuery.filename(decode_it(self.name()), None, decode_it(self.version()), decode_it(self.release()), decode_it(arch))
|
||||
|
||||
@staticmethod
|
||||
def query(filename):
|
||||
@ -318,6 +322,8 @@ class RpmQuery(packagequery.PackageQuery, packagequery.PackageQueryResult):
|
||||
if ver1 == ver2:
|
||||
return 0
|
||||
res = 0
|
||||
ver1 = decode_it(ver1)
|
||||
ver2 = decode_it(ver2)
|
||||
while res == 0:
|
||||
# remove all leading non alphanumeric or tilde chars
|
||||
ver1 = re.sub('^[^a-zA-Z0-9~]*', '', ver1)
|
||||
|
setup.py (18 changes)
@ -2,11 +2,13 @@
|
||||
|
||||
from distutils.core import setup
|
||||
import distutils.core
|
||||
import distutils.command.build
|
||||
import distutils.command.install_data
|
||||
from distutils.command import build, install_data
|
||||
import os.path
|
||||
import osc.core
|
||||
import sys
|
||||
|
||||
import setuptools
|
||||
|
||||
from osc import commandline
|
||||
from osc import babysitter
|
||||
# optional support for py2exe
|
||||
@ -17,7 +19,7 @@ except:
|
||||
HAVE_PY2EXE = False
|
||||
|
||||
|
||||
class build_osc(distutils.command.build.build, object):
|
||||
class build_osc(build.build, object):
|
||||
"""
|
||||
Custom build command which generates man page.
|
||||
"""
|
||||
@ -28,7 +30,7 @@ class build_osc(distutils.command.build.build, object):
|
||||
import gzip
|
||||
man_path = os.path.join(self.build_base, 'osc.1.gz')
|
||||
distutils.log.info('generating %s' % man_path)
|
||||
outfile = gzip.open(man_path, 'w')
|
||||
outfile = gzip.open(man_path, 'wt')
|
||||
osccli = commandline.Osc(stdout=outfile)
|
||||
# FIXME: we cannot call the main method because osc expects an ~/.oscrc
|
||||
# file (this would break builds in environments like the obs)
|
||||
@ -60,15 +62,15 @@ class build_docs(distutils.core.Command):
|
||||
src_dir = (self.distribution.package_dir or {'': ''})['']
|
||||
src_dir = os.path.join(os.getcwd(), src_dir)
|
||||
import sphinx
|
||||
sphinx.main(['runme',
|
||||
'-D', 'version=%s' % metadata.get_version(),
|
||||
sphinx.main(['runme',
|
||||
'-D', 'version=%s' % metadata.get_version(),
|
||||
os.path.join('docs',), os.path.join(self.built_docs, 'docs')])
|
||||
|
||||
|
||||
# take a potential build-base option into account (for instance, if osc is
|
||||
# build and installed like this:
|
||||
# python setup.py build --build-base=<dir> ... install ...)
|
||||
class install_data(distutils.command.install_data.install_data, object):
|
||||
class install_data(install_data.install_data, object):
|
||||
def initialize_options(self):
|
||||
super(install_data, self).initialize_options()
|
||||
self.built_data = None
|
||||
@ -97,7 +99,7 @@ data_files = []
|
||||
if sys.platform[:3] != 'win':
|
||||
data_files.append((os.path.join('share', 'man', 'man1'), ['osc.1.gz']))
|
||||
|
||||
setup(name='osc',
|
||||
setuptools.setup(name='osc',
|
||||
version = osc.core.__version__,
|
||||
description = 'openSUSE commander',
|
||||
long_description = 'Command-line client for the openSUSE Build Service, which allows to access repositories in the openSUSE Build Service in similar way as Subversion repositories.',
|
||||
|
@@ -24,6 +24,7 @@ import test_setlinkrev
import test_prdiff
import test_conf
import test_results
import test_helpers

suite = unittest.TestSuite()
suite.addTests(test_addfiles.suite())
@@ -42,6 +43,7 @@ suite.addTests(test_setlinkrev.suite())
suite.addTests(test_prdiff.suite())
suite.addTests(test_conf.suite())
suite.addTests(test_results.suite())
suite.addTests(test_helpers.suite())

if have_xmlrunner:
result = xmlrunner.XMLTestRunner(output=os.path.join(os.getcwd(), 'junit-xml-results')).run(suite)
tests/test_helpers.py (new file, 35 lines)
@@ -0,0 +1,35 @@
import unittest
from osc.util.helper import decode_it, decode_list

def suite():
    return unittest.makeSuite(TestResults)

class TestResults(unittest.TestCase):
    def testDecodeList(self):
        strlist = ['Test1', 'Test2', 'Test3']
        mixlist = ['Test1', b'Test2', 'Test3']
        byteslist = [b'Test1', b'Test2', b'Test3']

        out = decode_list(strlist)
        self.assertListEqual(out, strlist)

        out = decode_list(mixlist)
        self.assertListEqual(out, strlist)

        out = decode_list(byteslist)
        self.assertListEqual(out, strlist)


    def testDecodeIt(self):
        bytes_obj = b'Test the decoding'
        string_obj = 'Test the decoding'

        out = decode_it(bytes_obj)
        self.assertEqual(out, string_obj)

        out = decode_it(string_obj)
        self.assertEqual(out, string_obj)

if __name__ == '__main__':
    unittest.main()