python3 compatibility: urllib
fixes all renames in urllib, urllib2, urlparse modules in python3
commit 419367fca3
parent f6edc186c2
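Every hunk below applies the same two-part pattern: import from the Python 3 locations (urllib.request, urllib.parse, urllib.error, http.client), fall back to the Python 2 modules on ImportError, and then refer to the imported names directly (HTTPError, URLError, Request, build_opener, ...) instead of going through the urllib2 module. A minimal sketch of that pattern, assuming a made-up fetch() helper purely for illustration (it is not part of osc):

# Prefer the Python 3 module layout; fall back to the Python 2 names on ImportError.
try:
    from urllib.parse import urlsplit
    from urllib.error import URLError, HTTPError
    from urllib.request import Request, build_opener
except ImportError:
    # python 2.x
    from urlparse import urlsplit
    from urllib2 import URLError, HTTPError, Request, build_opener

def fetch(url):
    host = urlsplit(url).netloc
    # The except clauses name the imported symbols directly (HTTPError, URLError)
    # rather than urllib2.HTTPError, so the same code runs on Python 2 and 3.
    try:
        return build_opener().open(Request(url)).read()
    except HTTPError as e:
        print('%s returned HTTP %d' % (host, e.code))
    except URLError as e:
        print('could not reach %s: %s' % (host, e.reason))

osc itself binds the same handful of names once per module, which is why most of the hunks below are one-line substitutions of urllib2.X with the bare X.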
@@ -33,10 +33,11 @@ except:
 try:
     from http.client import HTTPException, BadStatusLine
+    from urllib.error import URLError, HTTPError
 except ImportError:
     #python 2.x
     from httplib import HTTPException, BadStatusLine
-from urllib2 import URLError, HTTPError
+    from urllib2 import URLError, HTTPError
 
 # the good things are stolen from Matt Mackall's mercurial
@@ -9,11 +9,15 @@ import os
 import re
 import sys
 import shutil
 
+try:
+    from urllib.parse import urlsplit
+    from urllib.request import URLError, HTTPError
+except ImportError:
+    #python 2.x
+    from urlparse import urlsplit
+    from urllib2 import URLError, HTTPError
 
 from tempfile import NamedTemporaryFile, mkdtemp
 from osc.fetch import *
 from osc.core import get_buildinfo, store_read_apiurl, store_read_project, store_read_package, meta_exists, quote_plus, get_buildconfig, is_package_dir
@@ -100,7 +104,7 @@ class Buildinfo:
     sys.exit(1)
 
 if not (apiurl.startswith('https://') or apiurl.startswith('http://')):
-    raise urllib2.URLError('invalid protocol for the apiurl: \'%s\'' % apiurl)
+    raise URLError('invalid protocol for the apiurl: \'%s\'' % apiurl)
 
 self.buildtype = buildtype
 self.apiurl = apiurl
@@ -609,7 +613,7 @@ def main(apiurl, opts, argv):
     bc_file = open(bc_filename, 'w')
     bc_file.write(bc)
     bc_file.flush()
-except urllib2.HTTPError as e:
+except HTTPError as e:
     if e.code == 404:
         # check what caused the 404
         if meta_exists(metatype='prj', path_args=(quote_plus(prj), ),
@@ -12,9 +12,11 @@ import sys
 import time
+try:
+    from urllib.parse import urlsplit
+    from urllib.error import HTTPError
+except ImportError:
+    #python 2.x
+    from urlparse import urlsplit
+    from urllib2 import HTTPError
 
 from optparse import SUPPRESS_HELP
 
@@ -475,7 +477,7 @@ class Osc(cmdln.Cmdln):
 if patchinfo:
     try:
         filelist = meta_get_filelist(apiurl, project, patchinfo)
-    except urllib2.HTTPError:
+    except HTTPError:
         pass
 
 if opts.force or not filelist or not '_patchinfo' in filelist:
@@ -1060,7 +1062,7 @@ class Osc(cmdln.Cmdln):
 devloc = None
 try:
     devloc = show_develproject(apiurl, dst_project, dst_package)
-except urllib2.HTTPError:
+except HTTPError:
     print("""\
Warning: failed to fetch meta data for '%s' package '%s' (new package?) """ \
         % (dst_project, dst_package), file=sys.stderr)
@@ -1272,7 +1274,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
 devloc = None
 try:
     devloc = show_develproject(apiurl, dst_project, dst_package)
-except urllib2.HTTPError:
+except HTTPError:
     print("""\
Warning: failed to fetch meta data for '%s' package '%s' (new package?) """ \
         % (dst_project, dst_package), file=sys.stderr)
@@ -2096,7 +2098,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
 try:
     # works since OBS 2.1
     diff = request_diff(apiurl, reqid)
-except urllib2.HTTPError as e:
+except HTTPError as e:
     # for OBS 2.0 and before
     sr_actions = r.get_actions('submit')
     if not sr_actions:
@@ -2137,7 +2139,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
     r = change_review_state(apiurl, reqid, state_map[cmd], review.by_user, review.by_group,
             review.by_project, review.by_package, opts.message or '', supersed=supersedid)
     print(r)
-except urllib2.HTTPError as e:
+except HTTPError as e:
     if review.by_user:
         print('No permission on review by user %s' % review.by_user)
     if review.by_group:
@@ -2194,7 +2196,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
     if link_node != None:
         links_to_project = link_node.get('project') or project
         links_to_package = link_node.get('package') or package
-except urllib2.HTTPError as e:
+except HTTPError as e:
     if e.code != 404:
         print('Cannot get list of files for %s/%s: %s' % (project, package, e), file=sys.stderr)
 except SyntaxError as e:
@@ -2364,7 +2366,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
 
 try:
     copy_pac(apiurl, project, package, apiurl, project, package, expand=True, comment=opts.message)
-except urllib2.HTTPError as e:
+except HTTPError as e:
     root = ET.fromstring(show_files_meta(apiurl, project, package, 'latest', expand=False))
     li = Linkinfo()
     li.read(root.find('linkinfo'))
@@ -2960,7 +2962,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
     uproject = root.find('attribute').find('value').text
     print('\nNote: The branch has been created from the configured update project: %s' \
         % uproject)
-except (AttributeError, urllib2.HTTPError) as e:
+except (AttributeError, HTTPError) as e:
     devloc = srcprj
     print('\nNote: The branch has been created of a different project,\n' \
         ' %s,\n' \
@@ -3378,7 +3380,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
 try:
     show_package_meta(apiurl, project, package)
     return True
-except urllib2.HTTPError as e:
+except HTTPError as e:
     if e.code != 404:
         print('Cannot check that %s/%s exists: %s' % (project, package, e), file=sys.stderr)
     return False
@@ -3406,7 +3408,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
 try:
     file = http_GET(link_url)
     root = ET.parse(file).getroot()
-except urllib2.HTTPError as e:
+except HTTPError as e:
     return (None, None)
 except SyntaxError as e:
     print('Cannot parse %s/%s/_link: %s' % (project, package, e), file=sys.stderr)
@@ -3425,7 +3427,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
 try:
     file = http_GET(link_url)
     root = ET.parse(file).getroot()
-except urllib2.HTTPError as e:
+except HTTPError as e:
     if e.code != 404:
         print('Cannot get list of files for %s/%s: %s' % (project, package, e), file=sys.stderr)
     return (None, None, None)
@@ -4446,7 +4448,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
     continue
 try:
     delete_files(apiurl, project, package, (filename, ))
-except urllib2.HTTPError as e:
+except HTTPError as e:
     if opts.force:
         print(e, file=sys.stderr)
         body = e.read()
@@ -6288,7 +6290,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
     print(r.list_view(), '\n')
     print("")
     return
-except urllib2.HTTPError as e:
+except HTTPError as e:
     if e.code == 400:
         # skip it ... try again with old style below
         pass
@@ -6483,7 +6485,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
 what = {'published/binary/id': xpath}
 try:
     res = search(apiurl, **what)
-except urllib2.HTTPError as e:
+except HTTPError as e:
     if e.code != 400 or not role_filter:
         raise e
     # backward compatibility: local role filtering
@@ -6886,7 +6888,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
 for role in roles:
     try:
         setBugowner(apiurl, result.get('project'), result.get('package'), bugowner)
-    except urllib2.HTTPError as e:
+    except HTTPError as e:
         if e.code == 403:
             print("No write permission in", result.get('project'), end=' ')
             if result.get('package'):
@@ -6909,7 +6911,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
 for role in roles:
     try:
         setBugowner(apiurl, prj, pac, opts.delete, role)
-    except urllib2.HTTPError as e:
+    except HTTPError as e:
         if e.code == 403:
             print("No write permission in", result.get('project'), end=' ')
             if result.get('package'):
@@ -7113,7 +7115,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
     else:
         for data in streamfile(u):
             sys.stdout.write(data)
-except urllib2.HTTPError as e:
+except HTTPError as e:
     if e.code == 404 and not opts.expand and not opts.unexpand:
         print('expanding link...', file=sys.stderr)
         query['rev'] = show_upstream_srcmd5(apiurl, args[0], args[1], expand=True, revision=opts.revision)
@@ -7532,7 +7534,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
     url = makeurl(apiurl, ['source', prj, '_pubkey'])
     f = http_GET(url)
     break
-except urllib2.HTTPError as e:
+except HTTPError as e:
     l = prj.rsplit(':', 1)
     # try key from parent project
     if not opts.notraverse and len(l) > 1 and l[0] and l[1] and e.code == 404:
osc/conf.py (37 changed lines)
@@ -40,20 +40,23 @@ import base64
 import os
 import re
 import sys
-import urllib
-import urllib2
 
 try:
     from http.cookiejar import LWPCookieJar, CookieJar
     from http.client import HTTPConnection, HTTPResponse
     from io import StringIO
+    from urllib.parse import urlsplit
+    from urllib.error import URLError
+    from urllib.request import HTTPBasicAuthHandler, HTTPCookieProcessor, HTTPPasswordMgrWithDefaultRealm, ProxyHandler
+    from urllib.request import build_opener, proxy_bypass
 except ImportError:
     #python 2.x
     from cookielib import LWPCookieJar, CookieJar
     from httplib import HTTPConnection, HTTPResponse
     from StringIO import StringIO
+    from urlparse import urlsplit
+    from urllib2 import URLError, HTTPBasicAuthHandler, HTTPCookieProcessor, HTTPPasswordMgrWithDefaultRealm, ProxyHandler
+    from urllib2 import build_opener, proxy_bypass
 
 from . import OscConfigParser
 from osc import oscerr
@@ -374,7 +377,7 @@ def parse_apisrv_url(scheme, apisrv):
     return urlsplit(urljoin(scheme, apisrv))[0:2]
 else:
     msg = 'invalid apiurl \'%s\' (specify the protocol (http:// or https://))' % apisrv
-    raise urllib2.URLError(msg)
+    raise URLError(msg)
 
 
 def urljoin(scheme, apisrv):
@@ -437,23 +440,23 @@ def _build_opener(url):
     return _build_opener.last_opener[1]
 
 # respect no_proxy env variable
-if urllib.proxy_bypass(apiurl):
+if proxy_bypass(apiurl):
     # initialize with empty dict
-    proxyhandler = urllib2.ProxyHandler({})
+    proxyhandler = ProxyHandler({})
 else:
     # read proxies from env
-    proxyhandler = urllib2.ProxyHandler()
+    proxyhandler = ProxyHandler()
 
 # workaround for http://bugs.python.org/issue9639
-authhandler_class = urllib2.HTTPBasicAuthHandler
+authhandler_class = HTTPBasicAuthHandler
 if sys.version_info >= (2, 6, 6) and sys.version_info < (2, 7, 1) \
-    and not 'reset_retry_count' in dir(urllib2.HTTPBasicAuthHandler):
+    and not 'reset_retry_count' in dir(HTTPBasicAuthHandler):
     print('warning: your urllib2 version seems to be broken. ' \
         'Using a workaround for http://bugs.python.org/issue9639', file=sys.stderr)
 
-    class OscHTTPBasicAuthHandler(urllib2.HTTPBasicAuthHandler):
+    class OscHTTPBasicAuthHandler(HTTPBasicAuthHandler):
         def http_error_401(self, *args):
-            response = urllib2.HTTPBasicAuthHandler.http_error_401(self, *args)
+            response = HTTPBasicAuthHandler.http_error_401(self, *args)
             self.retried = 0
             return response
 
@@ -463,7 +466,7 @@ def _build_opener(url):
 
     authhandler_class = OscHTTPBasicAuthHandler
 elif sys.version_info >= (2, 6, 6) and sys.version_info < (2, 7, 99):
-    class OscHTTPBasicAuthHandler(urllib2.HTTPBasicAuthHandler):
+    class OscHTTPBasicAuthHandler(HTTPBasicAuthHandler):
         def http_error_404(self, *args):
             self.reset_retry_count()
             return None
@@ -472,12 +475,12 @@ def _build_opener(url):
 elif sys.version_info >= (2, 6, 5) and sys.version_info < (2, 6, 6):
     # workaround for broken urllib2 in python 2.6.5: wrong credentials
     # lead to an infinite recursion
-    class OscHTTPBasicAuthHandler(urllib2.HTTPBasicAuthHandler):
+    class OscHTTPBasicAuthHandler(HTTPBasicAuthHandler):
         def retry_http_basic_auth(self, host, req, realm):
             # don't retry if auth failed
             if req.get_header(self.auth_header, None) is not None:
                 return None
-            return urllib2.HTTPBasicAuthHandler.retry_http_basic_auth(self, host, req, realm)
+            return HTTPBasicAuthHandler.retry_http_basic_auth(self, host, req, realm)
 
     authhandler_class = OscHTTPBasicAuthHandler
 
@@ -485,7 +488,7 @@ def _build_opener(url):
 # with None as first argument, it will always use this username/password
 # combination for urls for which arg2 (apisrv) is a super-url
 authhandler = authhandler_class( \
-    urllib2.HTTPPasswordMgrWithDefaultRealm())
+    HTTPPasswordMgrWithDefaultRealm())
 authhandler.add_password(None, apiurl, options['user'], options['pass'])
 
 if options['sslcertck']:
@@ -511,10 +514,10 @@ def _build_opener(url):
     ctx = oscssl.mySSLContext()
     if ctx.load_verify_locations(capath=capath, cafile=cafile) != 1:
         raise Exception('No CA certificates found')
-    opener = m2urllib2.build_opener(ctx, oscssl.myHTTPSHandler(ssl_context=ctx, appname='osc'), urllib2.HTTPCookieProcessor(cookiejar), authhandler, proxyhandler)
+    opener = m2urllib2.build_opener(ctx, oscssl.myHTTPSHandler(ssl_context=ctx, appname='osc'), HTTPCookieProcessor(cookiejar), authhandler, proxyhandler)
 else:
     print("WARNING: SSL certificate checks disabled. Connection is insecure!\n", file=sys.stderr)
-    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookiejar), authhandler, proxyhandler)
+    opener = build_opener(HTTPCookieProcessor(cookiejar), authhandler, proxyhandler)
 opener.addheaders = [('User-agent', 'osc/%s' % __version__)]
 _build_opener.last_opener = (apiurl, opener)
 return opener
@@ -557,7 +560,7 @@ def init_basicauth(config):
     # brute force
     def urllib2_debug_init(self, debuglevel=0):
         self._debuglevel = 1
-    urllib2.AbstractHTTPHandler.__init__ = urllib2_debug_init
+    AbstractHTTPHandler.__init__ = urllib2_debug_init
 
 cookie_file = os.path.expanduser(config['cookiejar'])
 global cookiejar
osc/core.py (86 changed lines)
@@ -16,21 +16,27 @@ import locale
 import os
 import os.path
 import sys
-import urllib2
-from urllib import pathname2url, quote_plus, urlencode, unquote
-try:
-    from urllib.parse import urlsplit, urlunsplit, urlparse
-    from io import StringIO
-except ImportError:
-    #python 2.x
-    from urlparse import urlsplit, urlunsplit, urlparse
-    from cStringIO import StringIO
 import shutil
 import subprocess
 import re
 import socket
 import errno
 
+try:
+    from urllib.parse import urlsplit, urlunsplit, urlparse, quote_plus, urlencode, unquote
+    from urllib.error import HTTPError
+    from urllib.request import pathname2url, install_opener, urlopen
+    from urllib.request import Request as URLRequest
+    from io import StringIO
+except ImportError:
+    #python 2.x
+    from urlparse import urlsplit, urlunsplit, urlparse
+    from urllib import pathname2url, quote_plus, urlencode, unquote
+    from urllib2 import HTTPError, install_opener, urlopen
+    from urllib2 import Request as URLRequest
+    from cStringIO import StringIO
 
 
 try:
     from xml.etree import cElementTree as ET
 except ImportError:
@@ -292,7 +298,7 @@ class Serviceinfo:
     self.read(root, True)
     self.project = project
     self.package = package
-except urllib2.HTTPError as e:
+except HTTPError as e:
     if e.code != 403 and e.code != 400:
         raise e
 
@@ -2869,11 +2875,11 @@ def http_request(method, url, headers={}, data=None, file=None, timeout=100):
     # adding data to an urllib2 request transforms it into a POST
     data = ''
 
-req = urllib2.Request(url)
+req = URLRequest(url)
 api_host_options = {}
 if conf.is_known_apiurl(url):
     # ok no external request
-    urllib2.install_opener(conf._build_opener(url))
+    install_opener(conf._build_opener(url))
     api_host_options = conf.get_apiurl_api_host_options(url)
     for header, value in api_host_options['http_headers']:
         req.add_header(header, value)
@@ -2920,7 +2926,7 @@ def http_request(method, url, headers={}, data=None, file=None, timeout=100):
 if old_timeout != timeout and not api_host_options.get('sslcertck'):
     socket.setdefaulttimeout(timeout)
 try:
-    fd = urllib2.urlopen(req, data=data)
+    fd = urlopen(req, data=data)
 finally:
     if old_timeout != timeout and not api_host_options.get('sslcertck'):
         socket.setdefaulttimeout(old_timeout)
@@ -3041,7 +3047,7 @@ def show_package_trigger_reason(apiurl, prj, pac, repo, arch):
 try:
     f = http_GET(url)
     return f.read()
-except urllib2.HTTPError as e:
+except HTTPError as e:
     e.osc_msg = 'Error getting trigger reason for project \'%s\' package \'%s\'' % (prj, pac)
     raise
 
@@ -3059,7 +3065,7 @@ def show_package_meta(apiurl, prj, pac, meta=False):
 try:
     f = http_GET(url)
     return f.readlines()
-except urllib2.HTTPError as e:
+except HTTPError as e:
     e.osc_msg = 'Error getting meta for project \'%s\' package \'%s\'' % (prj, pac)
     raise
 
@@ -3084,7 +3090,7 @@ def show_attribute_meta(apiurl, prj, pac, subpac, attribute, with_defaults, with
 try:
     f = http_GET(url)
     return f.readlines()
-except urllib2.HTTPError as e:
+except HTTPError as e:
     e.osc_msg = 'Error getting meta for project \'%s\' package \'%s\'' % (prj, pac)
     raise
 
@@ -3116,7 +3122,7 @@ def show_pattern_metalist(apiurl, prj):
 try:
     f = http_GET(url)
     tree = ET.parse(f)
-except urllib2.HTTPError as e:
+except HTTPError as e:
     e.osc_msg = 'show_pattern_metalist: Error getting pattern list for project \'%s\'' % prj
     raise
 r = sorted([ node.get('name') for node in tree.getroot() ])
@@ -3128,7 +3134,7 @@ def show_pattern_meta(apiurl, prj, pattern):
 try:
     f = http_GET(url)
     return f.readlines()
-except urllib2.HTTPError as e:
+except HTTPError as e:
     e.osc_msg = 'show_pattern_meta: Error getting pattern \'%s\' for project \'%s\'' % (pattern, prj)
     raise
 
@@ -3166,7 +3172,7 @@ class metafile:
 try:
     self.sync()
     break
-except urllib2.HTTPError as e:
+except HTTPError as e:
     error_help = "%d" % e.code
     if e.headers.get('X-Opensuse-Errorcode'):
         error_help = "%s (%d)" % (e.headers.get('X-Opensuse-Errorcode'), e.code)
@@ -3229,7 +3235,7 @@ def meta_exists(metatype,
 url = make_meta_url(metatype, path_args, apiurl)
 try:
     data = http_GET(url).readlines()
-except urllib2.HTTPError as e:
+except HTTPError as e:
     if e.code == 404 and create_new:
         data = metatypes[metatype]['template']
         if template_args:
@@ -3611,7 +3617,7 @@ def create_submit_request(apiurl,
     f = http_POST(u, data=xml)
     root = ET.parse(f).getroot()
     r = root.get('id')
-except urllib2.HTTPError as e:
+except HTTPError as e:
     if e.headers.get('X-Opensuse-Errorcode') == "submit_request_rejected":
         print("WARNING:")
         print("WARNING: Project does not accept submit request, request to open a NEW maintenance incident instead")
@@ -3897,7 +3903,7 @@ def get_group(apiurl, group):
 try:
     f = http_GET(u)
     return ''.join(f.readlines())
-except urllib2.HTTPError:
+except HTTPError:
     print('user \'%s\' not found' % group)
     return None
 
@@ -3906,7 +3912,7 @@ def get_user_meta(apiurl, user):
 try:
     f = http_GET(u)
     return ''.join(f.readlines())
-except urllib2.HTTPError:
+except HTTPError:
     print('user \'%s\' not found' % user)
     return None
 
@@ -4128,7 +4134,7 @@ def server_diff_noex(apiurl,
         old_project, old_package, old_revision,
         new_project, new_package, new_revision,
         unified, missingok, meta, expand)
-except urllib2.HTTPError as e:
+except HTTPError as e:
     msg = None
     body = None
     try:
@@ -4166,12 +4172,12 @@ def submit_action_diff(apiurl, action):
 try:
     return server_diff(apiurl, action.tgt_project, action.tgt_package, None,
         action.src_project, action.src_package, action.src_rev, True, True)
-except urllib2.HTTPError as e:
+except HTTPError as e:
     if e.code == 400:
         try:
             return server_diff(apiurl, action.tgt_project, action.tgt_package, None,
                 action.src_project, action.src_package, action.src_rev, True, False)
-        except urllib2.HTTPError as e:
+        except HTTPError as e:
             if e.code != 404:
                 raise e
             root = ET.fromstring(e.read())
@@ -4546,7 +4552,7 @@ def attribute_branch_pkg(apiurl, attribute, maintained_update_project_attribute,
 f = None
 try:
     f = http_POST(u)
-except urllib2.HTTPError as e:
+except HTTPError as e:
     msg = ''.join(e.readlines())
     msg = msg.split('<summary>')[1]
     msg = msg.split('</summary>')[0]
@@ -4598,7 +4604,7 @@ def branch_pkg(apiurl, src_project, src_package, nodevelproject=False, rev=None,
 u = makeurl(apiurl, ['source', src_project, src_package], query=query)
 try:
     f = http_POST(u)
-except urllib2.HTTPError as e:
+except HTTPError as e:
     if not return_existing:
         raise
     root = ET.fromstring(e.read())
@@ -4649,7 +4655,7 @@ def copy_pac(src_apiurl, src_project, src_package,
 found = None
 try:
     found = http_GET(url).readlines()
-except urllib2.HTTPError as e:
+except HTTPError as e:
     pass
 if force_meta_update or not found:
     print('Sending meta data...')
@@ -4932,7 +4938,7 @@ def get_results(apiurl, prj, package, lastbuild=None, repository=[], arch=[], ve
 results = r = []
 try:
     results = get_package_results(apiurl, prj, package, lastbuild, repository, arch, oldstate)
-except urllib2.HTTPError as e:
+except HTTPError as e:
     # check for simple timeout error and fetch again
     if e.code != 502:
         raise
@@ -5436,7 +5442,7 @@ def runservice(apiurl, prj, package):
 
 try:
     f = http_POST(u)
-except urllib2.HTTPError as e:
+except HTTPError as e:
     e.osc_msg = 'could not trigger service run for project \'%s\' package \'%s\'' % (prj, package)
     raise
 
@@ -5458,7 +5464,7 @@ def rebuild(apiurl, prj, package, repo, arch, code=None):
 u = makeurl(apiurl, ['build', prj], query=query)
 try:
     f = http_POST(u)
-except urllib2.HTTPError as e:
+except HTTPError as e:
     e.osc_msg = 'could not trigger rebuild for project \'%s\' package \'%s\'' % (prj, package)
     raise
 
@@ -5580,7 +5586,7 @@ def abortbuild(apiurl, project, package=None, arch=None, repo=None):
 u = makeurl(apiurl, ['build', project], query)
 try:
     f = http_POST(u)
-except urllib2.HTTPError as e:
+except HTTPError as e:
     e.osc_msg = 'abortion failed for project %s' % project
     if package:
         e.osc_msg += ' package %s' % package
@@ -5608,7 +5614,7 @@ def wipebinaries(apiurl, project, package=None, arch=None, repo=None, code=None)
 u = makeurl(apiurl, ['build', project], query)
 try:
     f = http_POST(u)
-except urllib2.HTTPError as e:
+except HTTPError as e:
     e.osc_msg = 'wipe binary rpms failed for project %s' % project
     if package:
         e.osc_msg += ' package %s' % package
@@ -5794,7 +5800,7 @@ def owner(apiurl, binary, mode="binary", attribute=None, project=None, usefilter
 try:
     f = http_GET(u)
     res = ET.parse(f).getroot()
-except urllib2.HTTPError as e:
+except HTTPError as e:
     # old server not supporting this search
     pass
 return res
@@ -5809,7 +5815,7 @@ def set_link_rev(apiurl, project, package, revision='', expand=False, baserev=Fa
 try:
     f = http_GET(url)
     root = ET.parse(f).getroot()
-except urllib2.HTTPError as e:
+except HTTPError as e:
     e.osc_msg = 'Unable to get _link file in package \'%s\' for project \'%s\'' % (package, project)
     raise
 
@@ -6307,7 +6313,7 @@ def request_interactive_review(apiurl, request, initial_cmd='', group=None, igno
 try:
     change_request_state(*args, **kwargs)
     return True
-except urllib2.HTTPError as e:
+except HTTPError as e:
     print('Server returned an error:', e, file=sys.stderr)
     print('Try -f to force the state change', file=sys.stderr)
     return False
@@ -6343,7 +6349,7 @@ def request_interactive_review(apiurl, request, initial_cmd='', group=None, igno
 try:
     diff = request_diff(apiurl, request.reqid)
     tmpfile.write(diff)
-except urllib2.HTTPError as e:
+except HTTPError as e:
     if e.code != 400:
         raise
     # backward compatible diff for old apis
@@ -6546,7 +6552,7 @@ def get_user_projpkgs(apiurl, user, role=None, exclude_projects=[], proj=True, p
 what['project_id'] = xpath_prj
 try:
     res = search(apiurl, **what)
-except urllib2.HTTPError as e:
+except HTTPError as e:
     if e.code != 400 or not role_filter_xpath:
         raise e
     # backward compatibility: local role filtering
@@ -6635,7 +6641,7 @@ def find_default_project(apiurl=None, package=None):
     # any fast query will do here.
     show_package_meta(apiurl, prj, package)
     return prj
-except urllib2.HTTPError:
+except HTTPError:
     pass
 return None
 
osc/fetch.py (18 changed lines)
@@ -6,8 +6,14 @@
 from __future__ import print_function
 
 import sys, os
-import urllib2
-from urllib import quote_plus
 
+try:
+    from urllib.parse import quote_plus
+    from urllib.request import HTTPBasicAuthHandler, HTTPCookieProcessor, HTTPPasswordMgrWithDefaultRealm, HTTPError
+except ImportError:
+    #python 2.x
+    from urllib import quote_plus
+    from urllib2 import HTTPBasicAuthHandler, HTTPCookieProcessor, HTTPPasswordMgrWithDefaultRealm, HTTPError
 
 from urlgrabber.grabber import URLGrabError
 from urlgrabber.mirror import MirrorGroup
@@ -45,7 +51,7 @@ class OscFileGrabber:
 try:
     for i in streamfile(url, progress_obj=self.progress_obj, text=text):
         f.write(i)
-except urllib2.HTTPError as e:
+except HTTPError as e:
     exc = URLGrabError(14, str(e))
     exc.url = url
     exc.exception = e
@@ -73,12 +79,12 @@ class Fetcher:
 self.cpio = {}
 self.enable_cpio = enable_cpio
 
-passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
+passmgr = HTTPPasswordMgrWithDefaultRealm()
 for host in api_host_options.keys():
     passmgr.add_password(None, host, api_host_options[host]['user'], api_host_options[host]['pass'])
-openers = (urllib2.HTTPBasicAuthHandler(passmgr), )
+openers = (HTTPBasicAuthHandler(passmgr), )
 if cookiejar:
-    openers += (urllib2.HTTPCookieProcessor(cookiejar), )
+    openers += (HTTPCookieProcessor(cookiejar), )
 self.gr = OscFileGrabber(progress_obj=self.progress_obj)
 
 def failureReport(self, errobj):
@@ -10,15 +10,16 @@ from M2Crypto.SSL.Checker import SSLVerificationError
 from M2Crypto import m2, SSL
 import M2Crypto.m2urllib2
 import socket
-import urllib
 import sys
 
 try:
-    from urllib.parse import urlparse
+    from urllib.parse import urlparse, splithost, splitport, splittype
+    from urllib.request import addinfourl
     from http.client import HTTPSConnection
 except ImportError:
     #python 2.x
     from urlparse import urlparse
+    from urllib import addinfourl, splithost, splitport, splittype
     from httplib import HTTPSConnection
 
 from .core import raw_input
@@ -239,7 +240,7 @@ class myHTTPSHandler(M2Crypto.m2urllib2.HTTPSHandler):
 r.recv = r.read
 fp = socket._fileobject(r)
 
-resp = urllib.addinfourl(fp, r.msg, req.get_full_url())
+resp = addinfourl(fp, r.msg, req.get_full_url())
 resp.code = r.status
 resp.msg = r.reason
 return resp
@@ -277,13 +278,13 @@ class myProxyHTTPSConnection(M2Crypto.httpslib.ProxyHTTPSConnection, HTTPSConnec
 def putrequest(self, method, url, skip_host=0, skip_accept_encoding=0):
     #putrequest is called before connect, so can interpret url and get
     #real host/port to be used to make CONNECT request to proxy
-    proto, rest = urllib.splittype(url)
+    proto, rest = splittype(url)
     if proto is None:
         raise ValueError("unknown URL type: %s" % url)
     #get host
-    host, rest = urllib.splithost(rest)
+    host, rest = splithost(rest)
     #try to get port
-    host, port = urllib.splitport(host)
+    host, port = splitport(host)
     #if port is not defined try to get from proto
     if port is None:
         try: