python3 compatibility: make all unit tests pass
There are many places that cannot be covered by 2to3, especially the str/unicode -> str/bytes change done in Python 3. This is a big patch incorporating all the changes needed to make suite.py run under Python 3 without a single failure. It

* adapts introspect_handler_3 for the case where there are no __defaults__
* adds the ET_ENCODING variable for ET.tostring ("unicode" in py3, "utf-8" in py2)
* (re)adds various builtins to both Python versions
  - memoryview to Python 2.6
  - a py3-compatible bytes to 2.6 and 2.7

and it changes a few parts of tests/common.py in order to be compatible with Python 3:

* the new urlcompare method compares all components of the URL plus the query string parsed into a dictionary, so neither ordering nor quoting matters
* the bytes builtin has been added for 2.x and is used in assertEqualMultiline
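Note (not part of the commit itself): the ET_ENCODING change boils down to the fact that ET.tostring() returns bytes on Python 3 unless encoding="unicode" is requested, while Python 2's ElementTree does not accept that token and needs "utf-8" to produce a native str. A minimal standalone sketch (using plain ElementTree and a version check instead of the try/except the patch uses):

import sys
from xml.etree import ElementTree as ET

# pick the encoding token once, as the patch does with ET_ENCODING
ET_ENCODING = "unicode" if sys.version_info[0] >= 3 else "utf-8"

root = ET.fromstring('<package name="osc"/>')
print(type(ET.tostring(root)))                        # bytes on py3, str on py2
print(type(ET.tostring(root, encoding=ET_ENCODING)))  # native str on both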
@@ -64,8 +64,13 @@ def introspect_handler_2(handler):
         func
 
 def introspect_handler_3(handler):
+    defaults = handler.__defaults__
+    if not defaults:
+        defaults = []
+    else:
+        defaults = list(handler.__defaults__)
     return \
-        list(handler.__defaults__), \
+        defaults, \
         handler.__code__.co_argcount, \
         handler.__code__.co_varnames, \
         handler.__code__.co_flags, \
@@ -13,10 +13,12 @@ import time
 try:
     from urllib.parse import urlsplit
     from urllib.error import HTTPError
+    ET_ENCODING = "unicode"
 except ImportError:
     #python 2.x
     from urlparse import urlsplit
     from urllib2 import HTTPError
+    ET_ENCODING = "utf-8"
 
 from optparse import SUPPRESS_HELP
 
@@ -3541,10 +3543,9 @@ Please submit there instead, or use --nodevelproject to force direct submission.
                 stdin=subprocess.PIPE,
                 stdout=subprocess.PIPE,
                 close_fds=True)
-            p.stdin.write(rdiff)
+            p.stdin.write(rdiff.encode())
             p.stdin.close()
-            diffstat = "".join(p.stdout.readlines())
-            print(diffstat)
+            print("".join(x.decode() for x in p.stdout.readlines()))
         elif opts.unified:
             print()
             print(rdiff)
@@ -6669,7 +6670,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
             data.find('title').text = ''.join(title)
             data.find('description').text = ''.join(descr)
             data.find('url').text = url
-            data = ET.tostring(data)
+            data = ET.tostring(data, encoding=ET_ENCODING)
         else:
             print('error - cannot get meta data', file=sys.stderr)
             sys.exit(1)
osc/core.py (88 changed lines)
@@ -45,18 +45,24 @@ except ImportError:
 from . import oscerr
 from . import conf
 
-# python 2.6 don't have memoryview
+# python 2.6 don't have memoryview, neither bytes
 try:
     memoryview
 except NameError:
     memoryview = buffer
 
 try:
+    # python 2.6 and python 2.7
     unicode
+    ET_ENCODING = "utf-8"
+    # python 2.6 does not have bytes and python 2.7 reimplements it as alias to
+    # str, but in incompatible way as it does not accept the same arguments
+    bytes = lambda x, *args: x
 except:
     #python3 does not have unicode, so lets reimplement it
     #as void function as it already gets unicode strings
     unicode = lambda x, *args: x
+    ET_ENCODING = "unicode"
 
 DISTURL_RE = re.compile(r"^(?P<bs>.*)://(?P<apiurl>.*?)/(?P<project>.*?)/(?P<repository>.*?)/(?P<revision>.*)-(?P<source>.*)$")
 BUILDLOGURL_RE = re.compile(r"^(?P<apiurl>https?://.*?)/build/(?P<project>.*?)/(?P<repository>.*?)/(?P<arch>.*?)/(?P<package>.*?)/_log$")
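Note (not part of the commit): the bytes shim above exists because the two-argument bytes(text, encoding) call only works on Python 3; on Python 2, bytes is an alias for str and str() takes a single argument, so the same call raises TypeError. A small self-contained sketch of the fallback:

try:
    unicode                      # defined on Python 2 only
    bytes = lambda x, *args: x   # 2.x: ignore the encoding argument
except NameError:
    pass                         # Python 3: the real bytes() already accepts it

assert bytes("foo", "utf-8") in ("foo", b"foo")   # passes on both versions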
@@ -286,7 +292,7 @@ class Serviceinfo:
                 data['command'] = name
                 self.services.append(data)
             except:
-                msg = 'invalid service format:\n%s' % ET.tostring(serviceinfo_node)
+                msg = 'invalid service format:\n%s' % ET.tostring(serviceinfo_node, encoding=ET_ENCODING)
                 raise oscerr.APIError(msg)
 
     def getProjectGlobalServices(self, apiurl, project, package):
@@ -721,7 +727,7 @@ class Project:
 
     def write_packages(self):
         xmlindent(self.pac_root)
-        store_write_string(self.absdir, '_packages', ET.tostring(self.pac_root))
+        store_write_string(self.absdir, '_packages', ET.tostring(self.pac_root, encoding=ET_ENCODING))
 
     def addPackage(self, pac):
         import fnmatch
@@ -1274,7 +1280,7 @@ class Package:
         if self.islinkrepair():
             query['repairlink'] = '1'
         u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
-        f = http_POST(u, data=ET.tostring(local_filelist))
+        f = http_POST(u, data=ET.tostring(local_filelist, encoding=ET_ENCODING))
         root = ET.parse(f).getroot()
         return root
 
@@ -1290,7 +1296,7 @@ class Package:
         for n in server_filelist.findall('entry'):
             name = n.get('name')
             if name is None:
-                raise oscerr.APIError('missing \'name\' attribute:\n%s\n' % ET.tostring(server_filelist))
+                raise oscerr.APIError('missing \'name\' attribute:\n%s\n' % ET.tostring(server_filelist, encoding=ET_ENCODING))
             todo.append(n.get('name'))
         return todo
 
@@ -1372,7 +1378,7 @@ class Package:
         if len(send):
             raise oscerr.PackageInternalError(self.prjname, self.name,
                 'server does not accept filelist:\n%s\nmissing:\n%s\n' \
-                % (ET.tostring(filelist), ET.tostring(sfilelist)))
+                % (ET.tostring(filelist, encoding=ET_ENCODING), ET.tostring(sfilelist, encoding=ET_ENCODING)))
         # these files already exist on the server
         # just copy them into the storedir
         for filename in real_send:
@@ -1394,12 +1400,12 @@ class Package:
             li = Linkinfo()
             li.read(sfilelist.find('linkinfo'))
             if li.xsrcmd5 is None:
-                raise oscerr.APIError('linkinfo has no xsrcmd5 attr:\n%s\n' % ET.tostring(sfilelist))
+                raise oscerr.APIError('linkinfo has no xsrcmd5 attr:\n%s\n' % ET.tostring(sfilelist, encoding=ET_ENCODING))
             sfilelist = ET.fromstring(self.get_files_meta(revision=li.xsrcmd5))
         for i in sfilelist.findall('entry'):
             if i.get('name') in self.skipped:
                 i.set('skipped', 'true')
-        store_write_string(self.absdir, '_files', ET.tostring(sfilelist) + '\n')
+        store_write_string(self.absdir, '_files', ET.tostring(sfilelist, encoding=ET_ENCODING) + '\n')
         for filename in todo_delete:
             self.to_be_deleted.remove(filename)
             self.delete_storefile(filename)
@@ -1520,7 +1526,7 @@ class Package:
             if size and self.size_limit and int(size) > self.size_limit \
                 or skip_service and (e.get('name').startswith('_service:') or e.get('name').startswith('_service_')):
                 e.set('skipped', 'true')
-        return ET.tostring(root)
+        return ET.tostring(root, encoding=ET_ENCODING)
 
     def update_datastructs(self):
         """
@@ -1895,13 +1901,13 @@ rev: %s
         url.text = self.url
 
         u = makeurl(self.apiurl, ['source', self.prjname, self.name, '_meta'])
-        mf = metafile(u, ET.tostring(root))
+        mf = metafile(u, ET.tostring(root, encoding=ET_ENCODING))
 
         if not force:
             print('*' * 36, 'old', '*' * 36)
             print(m)
             print('*' * 36, 'new', '*' * 36)
-            print(ET.tostring(root))
+            print(ET.tostring(root, encoding=ET_ENCODING))
             print('*' * 72)
             repl = raw_input('Write? (y/N/e) ')
         else:
@@ -1949,7 +1955,7 @@ rev: %s
     def __get_files(self, fmeta_root):
         f = []
         if fmeta_root.get('rev') is None and len(fmeta_root.findall('entry')) > 0:
-            raise oscerr.APIError('missing rev attribute in _files:\n%s' % ''.join(ET.tostring(fmeta_root)))
+            raise oscerr.APIError('missing rev attribute in _files:\n%s' % ''.join(ET.tostring(fmeta_root, encoding=ET_ENCODING)))
         for i in fmeta_root.findall('entry'):
             skipped = i.get('skipped') is not None
             f.append(File(i.get('name'), i.get('md5'),
@@ -2042,7 +2048,7 @@ rev: %s
                 deleted.remove(f)
             if not service_files:
                 services = []
-            self.__update(kept, added, deleted, services, ET.tostring(root), root.get('rev'))
+            self.__update(kept, added, deleted, services, ET.tostring(root, encoding=ET_ENCODING), root.get('rev'))
             os.unlink(os.path.join(self.storedir, '_in_update', '_files'))
             os.rmdir(os.path.join(self.storedir, '_in_update'))
         # ok everything is ok (hopefully)...
@@ -2225,7 +2231,7 @@ class AbstractState:
         """return "pretty" XML data"""
         root = self.to_xml()
         xmlindent(root)
-        return ET.tostring(root)
+        return ET.tostring(root, encoding=ET_ENCODING)
 
 
 class ReviewState(AbstractState):
@@ -2233,7 +2239,7 @@ class ReviewState(AbstractState):
     def __init__(self, review_node):
         if not review_node.get('state'):
             raise oscerr.APIError('invalid review node (state attr expected): %s' % \
-                ET.tostring(review_node))
+                ET.tostring(review_node, encoding=ET_ENCODING))
         AbstractState.__init__(self, review_node.tag)
         self.state = review_node.get('state')
         self.by_user = review_node.get('by_user')
@@ -2259,7 +2265,7 @@ class RequestState(AbstractState):
     def __init__(self, state_node):
         if not state_node.get('name'):
             raise oscerr.APIError('invalid request state node (name attr expected): %s' % \
-                ET.tostring(state_node))
+                ET.tostring(state_node, encoding=ET_ENCODING))
         AbstractState.__init__(self, state_node.tag)
         self.name = state_node.get('name')
         self.who = state_node.get('who')
@@ -2355,7 +2361,7 @@ class Action:
         """return "pretty" XML data"""
         root = self.to_xml()
         xmlindent(root)
-        return ET.tostring(root)
+        return ET.tostring(root, encoding=ET_ENCODING)
 
     @staticmethod
     def from_xml(action_node):
@@ -2396,10 +2402,10 @@ class Request:
         """read in a request"""
         self._init_attributes()
         if not root.get('id'):
-            raise oscerr.APIError('invalid request: %s\n' % ET.tostring(root))
+            raise oscerr.APIError('invalid request: %s\n' % ET.tostring(root, encoding=ET_ENCODING))
         self.reqid = root.get('id')
         if root.find('state') is None:
-            raise oscerr.APIError('invalid request (state expected): %s\n' % ET.tostring(root))
+            raise oscerr.APIError('invalid request (state expected): %s\n' % ET.tostring(root, encoding=ET_ENCODING))
         self.state = RequestState(root.find('state'))
         action_nodes = root.findall('action')
         if not action_nodes:
@@ -2460,7 +2466,7 @@ class Request:
         """return "pretty" XML data"""
         root = self.to_xml()
         xmlindent(root)
-        return ET.tostring(root)
+        return ET.tostring(root, encoding=ET_ENCODING)
 
     @staticmethod
     def format_review(review, show_srcupdate=False):
@@ -2926,7 +2932,10 @@ def http_request(method, url, headers={}, data=None, file=None, timeout=100):
     if old_timeout != timeout and not api_host_options.get('sslcertck'):
         socket.setdefaulttimeout(timeout)
     try:
+        if isinstance(data, str):
+            data=bytes(data, "utf-8")
         fd = urlopen(req, data=data)
+
     finally:
         if old_timeout != timeout and not api_host_options.get('sslcertck'):
             socket.setdefaulttimeout(old_timeout)
@@ -3194,7 +3203,6 @@ class metafile:
             print('discarding %s' % self.filename)
             os.unlink(self.filename)
 
-
 # different types of metadata
 metatypes = { 'prj': { 'path': 'source/%s/_meta',
                        'template': new_project_templ,
@@ -3539,7 +3547,7 @@ def clone_request(apiurl, reqid, msg=None):
         if i.get('name') == 'targetproject':
             project = i.text.strip()
     if not project:
-        raise oscerr.APIError('invalid data from clone request:\n%s\n' % ET.tostring(root))
+        raise oscerr.APIError('invalid data from clone request:\n%s\n' % ET.tostring(root, encoding=ET_ENCODING))
     return project
 
 # create a maintenance release request
@@ -3950,7 +3958,7 @@ def download(url, filename, progress_obj = None, mtime = None):
     try:
         o = os.fdopen(fd, 'wb')
         for buf in streamfile(url, http_GET, BUFSIZE, progress_obj=progress_obj):
-            o.write(buf)
+            o.write(bytes(buf,"utf-8"))
         o.close()
         os.rename(tmpfile, filename)
     except:
@@ -4027,7 +4035,7 @@ def dgst(file):
 
 def binary(s):
     """return true if a string is binary data using diff's heuristic"""
-    if s and '\0' in s[:4096]:
+    if s and bytes('\0', "utf-8") in s[:4096]:
         return True
     return False
 
@@ -4065,11 +4073,11 @@ def get_source_file_diff(dir, filename, rev, oldfilename = None, olddir = None,
 
     f1 = f2 = None
     try:
-        f1 = open(file1, 'rb')
+        f1 = open(file1, 'rt')
         s1 = f1.readlines()
         f1.close()
 
-        f2 = open(file2, 'rb')
+        f2 = open(file2, 'rt')
         s2 = f2.readlines()
         f2.close()
     finally:
@@ -4340,7 +4348,7 @@ def replace_pkg_meta(pkgmeta, new_name, new_prj, keep_maintainers = False,
     if not keep_develproject:
         for dp in root.findall('devel'):
             root.remove(dp)
-    return ET.tostring(root)
+    return ET.tostring(root, encoding=ET_ENCODING)
 
 def link_to_branch(apiurl, project, package):
     """
@@ -4389,7 +4397,7 @@ def link_pac(src_project, src_package, dst_project, dst_package, force, rev='',
         elm = ET.SubElement(root, 'publish')
         elm.clear()
         ET.SubElement(elm, 'disable')
-        dst_meta = ET.tostring(root)
+        dst_meta = ET.tostring(root, encoding=ET_ENCODING)
 
     if meta_change:
         edit_meta('pkg',
@@ -4481,7 +4489,7 @@ def aggregate_pac(src_project, src_package, dst_project, dst_package, repo_map =
         elm = ET.SubElement(root, 'publish')
         elm.clear()
         ET.SubElement(elm, 'disable')
-        dst_meta = ET.tostring(root)
+        dst_meta = ET.tostring(root, encoding=ET_ENCODING)
     if meta_change:
         edit_meta('pkg',
                   path_args=(dst_project, dst_package),
@@ -4565,7 +4573,7 @@ def attribute_branch_pkg(apiurl, attribute, maintained_update_project_attribute,
         return root
     # TODO: change api here and return parsed XML as class
     if conf.config['http_debug']:
-        print(ET.tostring(root), file=sys.stderr)
+        print(ET.tostring(root, encoding=ET_ENCODING), file=sys.stderr)
     for node in root.findall('data'):
         r = node.get('name')
         if r and r == 'targetproject':
@@ -4610,7 +4618,7 @@ def branch_pkg(apiurl, src_project, src_package, nodevelproject=False, rev=None,
             root = ET.fromstring(e.read())
             summary = root.find('summary')
             if summary is None:
-                raise oscerr.APIError('unexpected response:\n%s' % ET.tostring(root))
+                raise oscerr.APIError('unexpected response:\n%s' % ET.tostring(root, encoding=ET_ENCODING))
             m = re.match(r"branch target package already exists: (\S+)/(\S+)", summary.text)
             if not m:
                 e.msg += '\n' + summary.text
@@ -4618,7 +4626,7 @@ def branch_pkg(apiurl, src_project, src_package, nodevelproject=False, rev=None,
             return (True, m.group(1), m.group(2), None, None)
 
     if conf.config['http_debug']:
-        print(ET.tostring(root), file=sys.stderr)
+        print(ET.tostring(root, encoding=ET_ENCODING), file=sys.stderr)
     data = {}
     for i in ET.fromstring(f.read()).findall('data'):
         data[i.get('name')] = i.text
@@ -5839,7 +5847,7 @@ def set_link_rev(apiurl, project, package, revision='', expand=False, baserev=Fa
     if vrev and revision and len(revision) >= 32:
         root.set('vrev', vrev)
 
-    l = ET.tostring(root)
+    l = ET.tostring(root, encoding=ET_ENCODING)
     http_PUT(url, data=l)
     return revision
 
@@ -5944,7 +5952,7 @@ def addPerson(apiurl, prj, pac, user, role="maintainer"):
         print('user \'%s\' added to \'%s\'' % (user, pac or prj))
         edit_meta(metatype=kind,
                   path_args=path,
-                  data=ET.tostring(root))
+                  data=ET.tostring(root, encoding=ET_ENCODING))
     else:
         print("osc: an error occured")
 
@@ -5974,7 +5982,7 @@ def delPerson(apiurl, prj, pac, user, role="maintainer"):
         if found:
             edit_meta(metatype=kind,
                       path_args=path,
-                      data=ET.tostring(root))
+                      data=ET.tostring(root, encoding=ET_ENCODING))
         else:
             print("user \'%s\' not found in \'%s\'" % (user, pac or prj))
     else:
@@ -6007,7 +6015,7 @@ def setBugowner(apiurl, prj, pac, user=None, group=None):
         print("Neither user nor group is specified")
     edit_meta(metatype=kind,
               path_args=path,
-              data=ET.tostring(root))
+              data=ET.tostring(root, encoding=ET_ENCODING))
 
 def setDevelProject(apiurl, prj, pac, dprj, dpkg=None):
     """ set the <devel project="..."> element to package metadata"""
@@ -6034,7 +6042,7 @@ def setDevelProject(apiurl, prj, pac, dprj, dpkg=None):
             del elem.attrib['package']
         edit_meta(metatype='pkg',
                   path_args=path,
-                  data=ET.tostring(root))
+                  data=ET.tostring(root, encoding=ET_ENCODING))
     else:
         print("osc: an error occured")
 
@@ -6084,7 +6092,7 @@ def addGitSource(url):
     # for pretty output
     xmlindent(s)
     f = open(service_file, 'wb')
-    f.write(ET.tostring(s))
+    f.write(ET.tostring(s, encoding=ET_ENCODING))
     f.close()
     if addfile:
         addFiles( ['_service'] )
@@ -6105,7 +6113,7 @@ def addDownloadUrlService(url):
     # for pretty output
     xmlindent(s)
    f = open(service_file, 'wb')
-    f.write(ET.tostring(s))
+    f.write(ET.tostring(s, encoding=ET_ENCODING))
     f.close()
     if addfile:
         addFiles( ['_service'] )
@@ -6127,7 +6135,7 @@ def addDownloadUrlService(url):
     # for pretty output
     xmlindent(s)
     f = open(service_file, 'wb')
-    f.write(ET.tostring(s))
+    f.write(ET.tostring(s, encoding=ET_ENCODING))
     f.close()
 
 
@@ -7,12 +7,41 @@ import sys
 from xml.etree import cElementTree as ET
 EXPECTED_REQUESTS = []
 
+if sys.version_info[0:2] in ((2, 6), (2, 7)):
+    bytes = lambda x, *args: x
+
 try:
+    #python 2.x
     from cStringIO import StringIO
     from urllib2 import HTTPHandler, addinfourl, build_opener
+    from urlparse import urlparse, parse_qs
 except ImportError:
     from io import StringIO
     from urllib.request import HTTPHandler, addinfourl, build_opener
+    from urllib.parse import urlparse, parse_qs
+
+def urlcompare(url, *args):
+    """compare all components of url except query string - it is converted to
+    dict, therefor different ordering does not makes url's different, as well
+    as quoting of a query string"""
+
+    components = urlparse(url)
+    query_args = parse_qs(components.query)
+    components = components._replace(query=None)
+
+    if not args:
+        return False
+
+    for url in args:
+        components2 = urlparse(url)
+        query_args2 = parse_qs(components2.query)
+        components2 = components2._replace(query=None)
+
+        if components != components2 or \
+            query_args != query_args2:
+            return False
+
+    return True
 
 class RequestWrongOrder(Exception):
     """raised if an unexpected request is issued to urllib2"""
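Note (not part of the commit): a short usage sketch of the urlcompare() helper added above, with invented URLs; query-string ordering no longer matters, while any other URL component still does:

assert urlcompare('http://api/source/pkg?rev=1&expand=1',
                  'http://api/source/pkg?expand=1&rev=1')
assert not urlcompare('http://api/source/pkg?rev=1',
                      'http://api/source/other?rev=1')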
@@ -44,7 +73,7 @@ class MyHTTPHandler(HTTPHandler):
 
     def http_open(self, req):
         r = self.__exp_requests.pop(0)
-        if req.get_full_url() != r[1] or req.get_method() != r[0]:
+        if not urlcompare(req.get_full_url(), r[1]) or req.get_method() != r[0]:
            raise RequestWrongOrder(req.get_full_url(), r[1], req.get_method(), r[0])
        if req.get_method() in ('GET', 'DELETE'):
            return self.__mock_GET(r[1], **r[2])
@@ -63,7 +92,7 @@ class MyHTTPHandler(HTTPHandler):
         elif exp is None:
             raise RuntimeError('exp or expfile required')
         if exp is not None:
-            if req.get_data() != exp:
+            if req.get_data() != bytes(exp, "utf-8"):
                 raise RequestDataMismatch(req.get_full_url(), repr(req.get_data()), repr(exp))
         return self.__get_response(req.get_full_url(), **kwargs)
 