Mirror of https://github.com/openSUSE/osc.git (synced 2024-11-10 06:46:15 +01:00)
- fixed #590606 ("osc/fetch.py does not support authenticated URLs")

Bye bye, urlgrabber.grabber.URLGrabber: we now use our own "grabber" object (OscFileGrabber()), because this way we can reuse the urllib2 handlers (the latest python-urlgrabber version uses python-curl instead of urllib2...).
parent a4a179f1d9 · commit 95ec7dee7b
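The "reuse the urllib2 handlers" point in the message works because urllib2.urlopen() routes every request through the globally installed opener: auth and cookie handlers registered once apply to every download that goes through core.http_request()/streamfile(), which is exactly what a pycurl-based urlgrabber can no longer piggyback on. A minimal sketch of that mechanism (Python 2, matching the codebase; host and credentials are made up):

    import urllib2

    # hypothetical credentials -- osc builds the real handlers from ~/.oscrc
    passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
    passmgr.add_password(None, 'api.example.org', 'user', 'secret')

    # once installed, the opener serves every urllib2.urlopen() call,
    # including the ones issued deep inside core.streamfile()
    urllib2.install_opener(
        urllib2.build_opener(urllib2.HTTPBasicAuthHandler(passmgr)))

    # authenticated without passing an opener around
    fd = urllib2.urlopen('https://api.example.org/source/some-project/_meta')
    print fd.read()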
@@ -195,7 +195,7 @@ class Pac:
         # that the filename is suitable as identifier)
         self.localdir = '%s/%s/%s/%s' % (cachedir, self.project, self.repository, self.arch)
         self.fullfilename = os.path.join(self.localdir, self.filename)
-        self.url_local = 'file://%s/' % self.fullfilename
+        self.url_local = 'file://%s' % self.fullfilename
 
         # first, add the local URL
         self.urllist.append(self.url_local)
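Why the dropped trailing slash matters: the new grabber in osc/fetch.py (below) strips the file:// scheme and runs os.path.isfile() on the remainder, and a path ending in '/' never names a regular file. A quick illustration, with a made-up cache path:

    import os

    fullfilename = '/var/tmp/osbuild-packagecache/prj/repo/i586/foo.rpm'  # hypothetical

    for url in ('file://%s/' % fullfilename,    # old form, trailing slash
                'file://%s' % fullfilename):    # new form
        path = url.replace('file://', '', 1)    # same stripping as OscFileGrabber.urlgrab()
        print path, os.path.isfile(path)        # the old form always prints False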
|
osc/core.py (17 changed lines)
@@ -1909,11 +1909,14 @@ def http_request(method, url, headers={}, data=None, file=None, timeout=100):
             data = ''
 
     req = urllib2.Request(url)
-    api_host_options=conf.get_apiurl_api_host_options(url)
-    for header, value in api_host_options['http_headers']:
-        req.add_header(header, value)
+    api_host_options = {}
+    try:
+        api_host_options = conf.get_apiurl_api_host_options(url)
+        for header, value in api_host_options['http_headers']:
+            req.add_header(header, value)
+    except:
+        # "external" request (url is no apiurl)
+        pass
 
     req.get_method = lambda: method
 
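The try/except is what lets the same http_request() serve plain download URLs as well: a mirror URL from a package's urllist is not a configured API host, so conf.get_apiurl_api_host_options() fails for it (hence the blanket except, per the "external" request comment), and the request simply goes out without the per-API extra headers.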
@@ -1954,12 +1957,12 @@ def http_request(method, url, headers={}, data=None, file=None, timeout=100):
 
     old_timeout = socket.getdefaulttimeout()
     # XXX: dirty hack as timeout doesn't work with python-m2crypto
-    if old_timeout != timeout and not api_host_options['sslcertck']:
+    if old_timeout != timeout and not api_host_options.get('sslcertck'):
         socket.setdefaulttimeout(timeout)
     try:
         fd = urllib2.urlopen(req, data=data)
     finally:
-        if old_timeout != timeout and not api_host_options['sslcertck']:
+        if old_timeout != timeout and not api_host_options.get('sslcertck'):
             socket.setdefaulttimeout(old_timeout)
     if hasattr(conf.cookiejar, 'save'):
         conf.cookiejar.save(ignore_discard=True)
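Switching to api_host_options.get('sslcertck') follows from the hunk above: for an external URL the dict now stays empty, so a plain ['sslcertck'] lookup would raise KeyError before the timeout workaround could run.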
@@ -3680,7 +3683,7 @@ def get_prj_results(apiurl, prj, hide_legend=False, csv=False, status_filter=None
     return r
 
 
-def streamfile(url, http_meth = http_GET, bufsize=8192, data=None, progress_obj=None):
+def streamfile(url, http_meth = http_GET, bufsize=8192, data=None, progress_obj=None, text=None):
     """
     performs http_meth on url and read bufsize bytes from the response
     until EOF is reached. After each read bufsize bytes are yielded to the
@@ -3690,7 +3693,7 @@ def streamfile(url, http_meth = http_GET, bufsize=8192, data=None, progress_obj=None):
     if progress_obj:
         import urlparse
         basename = os.path.basename(urlparse.urlsplit(url)[2])
-        progress_obj.start(basename=basename, size=int(f.info().get('Content-Length', -1)))
+        progress_obj.start(basename=basename, text=text, size=int(f.info().get('Content-Length', -1)))
     data = f.read(bufsize)
     read = len(data)
     while len(data):
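streamfile() hands the new text argument straight to progress_obj.start(). In practice the progress object is urlgrabber's TextMeter; a stand-in only needs the small surface used here. A sketch of that surface -- start() with the keywords visibly passed above, plus the update()/end() calls a urlgrabber-style meter conventionally receives (assumed, not shown in this hunk):

    from osc.core import streamfile

    class SimpleMeter:
        def start(self, basename=None, text=None, size=None, **kwargs):
            # 'text' carries the '%s(%s) %s' label built in fetch.py
            self.label = text or basename
            print 'fetching %s (%s bytes)' % (self.label, size)

        def update(self, amount_read, now=None):
            pass    # e.g. redraw a progress bar

        def end(self, amount_read, now=None):
            print '%s: done' % self.label

    url = 'http://download.example.org/foo.rpm'    # hypothetical
    for chunk in streamfile(url, progress_obj=SimpleMeter(), text='(1/2) foo.rpm'):
        pass    # consume the download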
|
osc/fetch.py (61 changed lines)
@@ -5,9 +5,9 @@
 
 import sys, os
 import urllib2
-from urlgrabber.grabber import URLGrabber, URLGrabError
+from urlgrabber.grabber import URLGrabError
 from urlgrabber.mirror import MirrorGroup
-from core import makeurl
+from core import makeurl, streamfile
 from util import packagequery, cpio
 import conf
 import tempfile
@@ -23,22 +23,41 @@ def join_url(self, base_url, rel_url):
     IOW, we make MirrorGroup ignore relative_url"""
     return base_url
 
+class OscFileGrabber:
+    def __init__(self, progress_obj = None):
+        self.progress_obj = progress_obj
+
+    def urlgrab(self, url, filename, text = None, **kwargs):
+        if url.startswith('file://'):
+            file = url.replace('file://', '', 1)
+            if os.path.isfile(file):
+                return file
+            else:
+                raise URLGrabError(2, 'Local file \'%s\' does not exist' % file)
+        f = open(filename, 'wb')
+        try:
+            try:
+                for i in streamfile(url, progress_obj=self.progress_obj, text=text):
+                    f.write(i)
+            except urllib2.HTTPError, e:
+                exc = URLGrabError(14, str(e))
+                exc.url = url
+                exc.exception = e
+                exc.code = e.code
+                raise exc
+        finally:
+            f.close()
+        return filename
+
 class Fetcher:
     def __init__(self, cachedir = '/tmp', api_host_options = {}, urllist = [], http_debug = False,
                  cookiejar = None, offline = False, enable_cpio = False):
 
-        __version__ = '0.1'
-        __user_agent__ = 'osbuild/%s' % __version__
-
         # set up progress bar callback
         if sys.stdout.isatty() and TextMeter:
             self.progress_obj = TextMeter(fo=sys.stdout)
         else:
             self.progress_obj = None
 
-        self.nopac = False
         self.cachedir = cachedir
         self.urllist = urllist
         self.http_debug = http_debug
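A usage sketch for the new class (mirror URL and paths made up): urlgrab() implements just enough of urlgrabber's grabber interface for MirrorGroup to drive it, and converts urllib2.HTTPError into URLGrabError(14) so the existing failover machinery keeps working:

    from urlgrabber.grabber import URLGrabError
    from osc.fetch import OscFileGrabber

    gr = OscFileGrabber()    # pass a TextMeter-like object for progress output
    try:
        gr.urlgrab('http://download.example.org/repo/foo.rpm',
                   filename='/tmp/foo.rpm', text='foo.rpm')
    except URLGrabError, e:
        print 'download failed (%s): %s' % (e.errno, e)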
@@ -52,24 +71,15 @@ class Fetcher:
         openers = (urllib2.HTTPBasicAuthHandler(passmgr), )
         if cookiejar:
             openers += (urllib2.HTTPCookieProcessor(cookiejar), )
-        self.gr = URLGrabber(user_agent=__user_agent__,
-                             keepalive=1,
-                             opener = urllib2.build_opener(*openers),
-                             progress_obj=self.progress_obj,
-                             failure_callback=(self.failureReport,(),{}),
-                             )
+        self.gr = OscFileGrabber(progress_obj=self.progress_obj)
 
     def failureReport(self, errobj):
         """failure output for failovers from urlgrabber"""
-        #log(0, '%s: %s' % (errobj.url, str(errobj.exception)))
-        #log(0, 'Trying other mirror.')
-        if not self.nopac:
-            print 'Trying openSUSE Build Service server for %s (%s), not found at %s.' \
-                % (self.curpac, self.curpac.project, errobj.url.split('/')[2])
-        raise errobj.exception
+        if errobj.url.startswith('file://'):
+            return {}
+        print 'Trying openSUSE Build Service server for %s (%s), not found at %s.' \
+            % (self.curpac, self.curpac.project, errobj.url.split('/')[2])
+        return {}
 
     def fetch(self, pac, prefix=''):
         # for use by the failure callback
@@ -79,7 +89,7 @@ class Fetcher:
             return True
 
         MirrorGroup._join_url = join_url
-        mg = MirrorGroup(self.gr, pac.urllist)
+        mg = MirrorGroup(self.gr, pac.urllist, failure_callback=(self.failureReport,(),{}))
 
         if self.http_debug:
             print
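Attaching failure_callback to the MirrorGroup rather than to the grabber (as the old URLGrabber constructor did) keeps the failover behaviour: urlgrabber invokes the (function, args, kwargs) tuple on each mirror failure, and, as failureReport's new return {} suggests, returning an action dict instead of raising lets the group move on to the next URL in pac.urllist. A hypothetical standalone version:

    from urlgrabber.mirror import MirrorGroup

    def report(errobj):
        print 'mirror failed: %s' % errobj.url
        return {}    # empty action dict: keep the default failover behaviour

    gr = OscFileGrabber()    # from the sketch above
    mg = MirrorGroup(gr, ['http://mirror-a.example.org/foo.rpm',    # made-up mirrors
                          'http://mirror-b.example.org/foo.rpm'],
                     failure_callback=(report, (), {}))
    mg.urlgrab('foo.rpm', filename='/tmp/foo.rpm')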
@@ -90,8 +100,7 @@ class Fetcher:
         (fd, tmpfile) = tempfile.mkstemp(prefix='osc_build')
         try:
             try:
-                # it returns the filename
-                ret = mg.urlgrab(pac.filename,
+                mg.urlgrab(pac.filename,
                     filename = tmpfile,
                     text = '%s(%s) %s' %(prefix, pac.project, pac.filename))
                 self.move_package(tmpfile, pac.localdir, pac)
@@ -137,7 +146,6 @@ class Fetcher:
             print >>sys.stderr, e
             sys.exit(1)
 
-
     def run(self, buildinfo):
         from urllib import quote_plus
         cached = 0
@@ -197,7 +205,6 @@ class Fetcher:
                 if os.path.exists(tmpfile):
                     os.unlink(tmpfile)
 
-        self.nopac = True
         prjs = buildinfo.projects.keys()
         for i in prjs:
             dest = "%s/%s" % (self.cachedir, i)
@@ -232,8 +239,6 @@ class Fetcher:
                 if len(l) > 1 and l[1] and not l[0] in buildinfo.projects:
                     prjs.append(l[0])
 
-        self.nopac = False
 
 def verify_pacs_old(pac_list):
     """Take a list of rpm filenames and run rpm -K on them.
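The commit also retires the self.nopac flag: its only reader was the if not self.nopac guard removed from failureReport() above, so the assignments in __init__() and run() would have had no effect and are deleted as well.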
|