
Merge branch 'python3_prep' of https://github.com/lethliel/osc

Get rid of the urlgrabber dependency. The current implementation of the
progress bar is quite "noisy" if the line length exceeds the size of
the terminal window, but that's something we could fix later. (The
superfluous error message will be fixed in a subsequent commit.)
Marcus Huewe 2018-11-01 19:55:07 +01:00
commit bec52a7917
9 changed files with 115 additions and 194 deletions
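
As a rough illustration of what replaces urlgrabber here, a minimal sketch of the new download path (the URL and file names are made up, and TextMeter now requires the python-progressbar module):

    import sys

    from osc.grabber import OscFileGrabber
    from osc.meter import TextMeter  # needs the python-progressbar module

    progress = TextMeter() if sys.stdout.isatty() else None
    grabber = OscFileGrabber(progress_obj=progress)
    # streamfile() reads the body in chunks and drives the progress bar;
    # urlgrab() writes the chunks to the given file name.
    grabber.urlgrab('https://example.org/repo/x86_64/foo.rpm',
                    filename='/tmp/foo.rpm', text='foo.rpm')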

README

@@ -26,10 +26,6 @@ Alternatively, you can directly use osc-wrapper.py from the source dir
 The program needs the cElementTree python module installed. On SUSE, the
 respective package is called python-elementtree (before 10.2: python-xml).
-For local building, you will need python-urlgrabber in addition. Those are
-standard package on SUSE Linux since a while. If your version is too old, you
-can find python-elementtree and python-urlgrabber here:
-http://download.opensuse.org/repositories/devel:/languages:/python/

osc/__init__.py

@@ -1,3 +1,3 @@
-__all__ = ['babysitter', 'core', 'commandline', 'oscerr', 'othermethods', 'build', 'fetch', 'meter']
+__all__ = ['babysitter', 'core', 'commandline', 'oscerr', 'build', 'fetch', 'meter', 'grabber']
 
 # vim: sw=4 et

osc/babysitter.py

@@ -11,7 +11,6 @@ import pdb
 import sys
 import signal
 import traceback
-from urlgrabber.grabber import URLGrabError
 
 from osc import oscerr
 from .oscsslexcp import NoSecureSSLError
@@ -132,8 +131,6 @@ def run(prg, argv=None):
        print(e, file=sys.stderr)
    except URLError as e:
        print('Failed to reach a server:\n', e.reason, file=sys.stderr)
-    except URLGrabError as e:
-        print('Failed to grab %s: %s' % (e.url, e.strerror), file=sys.stderr)
    except IOError as e:
        # ignore broken pipe
        if e.errno != errno.EPIPE:

osc/build.py

@@ -256,18 +256,8 @@ class Pac:
     def makeurls(self, cachedir, urllist):
 
         self.urllist = []
 
-        # build up local URL
-        # by using the urlgrabber with local urls, we basically build up a cache.
-        # the cache has no validation, since the package servers don't support etags,
-        # or if-modified-since, so the caching is simply name-based (on the assumption
-        # that the filename is suitable as identifier)
         self.localdir = '%s/%s/%s/%s' % (cachedir, self.project, self.repository, self.arch)
         self.fullfilename = os.path.join(self.localdir, self.canonname)
-        self.url_local = 'file://%s' % self.fullfilename
-
-        # first, add the local URL
-        self.urllist.append(self.url_local)
 
         # remote URLs
         for url in urllist:
@@ -319,14 +309,14 @@ def get_preinstall_image(apiurl, arch, cache_dir, img_info):
            print(e, file=sys.stderr)
            sys.exit(1)
    if sys.stdout.isatty() and TextMeter:
-        progress_obj = TextMeter(fo=sys.stdout)
+        progress_obj = TextMeter()
    else:
        progress_obj = None
    gr = OscFileGrabber(progress_obj=progress_obj)
    try:
        gr.urlgrab(url, filename=ifile_path_part, text='fetching image')
-    except URLGrabError as e:
-        print("Failed to download! ecode:%i errno:%i" % (e.code, e.errno))
+    except HTTPError as e:
+        print("Failed to download! ecode:%i reason:%i" % (e.code, e.reason))
        return ('', '', [])
    # download ok, rename partial file to final file name
    os.rename(ifile_path_part, ifile_path)

osc/commandline.py

@@ -167,7 +167,7 @@ class Osc(cmdln.Cmdln):
         self.download_progress = None
         if conf.config.get('show_download_progress', False):
             from .meter import TextMeter
-            self.download_progress = TextMeter(hide_finished=True)
+            self.download_progress = TextMeter()
 
     def get_cmd_help(self, cmdname):
@@ -7785,9 +7785,12 @@ Please submit there instead, or use --nodevelproject to force direct submission.
             sys.exit(1)
 
         if '://' in srpm:
+            if srpm.endswith('/'):
+                print('%s is not a valid link. It must not end with /' % srpm)
+                sys.exit(1)
             print('trying to fetch', srpm)
-            import urlgrabber
-            urlgrabber.urlgrab(srpm)
+            from .grabber import OscFileGrabber
+            OscFileGrabber().urlgrab(srpm)
             srpm = os.path.basename(srpm)
             srpm = os.path.abspath(srpm)
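
A side note on the replacement call: unlike urlgrabber.urlgrab(), the new OscFileGrabber().urlgrab() derives the target file from the URL path only when no filename is passed (see osc/grabber.py below). A small sketch with a made-up URL:

    from osc.grabber import OscFileGrabber

    # With no filename argument, the file is saved under the URL's basename
    # in the current working directory ('hello-1.0-1.src.rpm' here).
    OscFileGrabber().urlgrab('https://example.org/src/hello-1.0-1.src.rpm')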

osc/core.py

@@ -5992,8 +5992,11 @@ def streamfile(url, http_meth = http_GET, bufsize=8192, data=None, progress_obj=
         cl = int(cl)
 
     if progress_obj:
-        basename = os.path.basename(urlsplit(url)[2])
-        progress_obj.start(basename=basename, text=text, size=cl)
+        if not text:
+            basename = os.path.basename(urlsplit(url)[2])
+        else:
+            basename = text
+        progress_obj.start(basename, cl)
 
     if bufsize == "line":
         bufsize = 8192
@@ -6012,7 +6015,7 @@ def streamfile(url, http_meth = http_GET, bufsize=8192, data=None, progress_obj=
             yield data
 
     if progress_obj:
-        progress_obj.end(read)
+        progress_obj.end()
     f.close()
 
     if not cl is None and read != cl:
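
The hunk above also changes the progress-object protocol: start() now takes the label and size positionally, and end() no longer receives the byte count. Any object exposing this small interface should keep working as streamfile()'s progress_obj; a minimal sketch with made-up names:

    class DotMeter(object):
        """Minimal stand-in for osc.meter.TextMeter (illustration only)."""

        def start(self, basename, size=None):
            self.seen = 0
            print('fetching %s (%s bytes)' % (basename, size or 'unknown'))

        def update(self, amount_read):
            self.seen = amount_read

        def end(self):
            print('done after %d bytes' % self.seen)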

osc/fetch.py

@@ -15,64 +15,26 @@ except ImportError:
     from urllib import quote_plus
     from urllib2 import HTTPBasicAuthHandler, HTTPCookieProcessor, HTTPPasswordMgrWithDefaultRealm, HTTPError
 
-from urlgrabber.grabber import URLGrabber, URLGrabError
-from urlgrabber.mirror import MirrorGroup
-
 from .core import makeurl, streamfile, dgst
+from .grabber import OscFileGrabber, OscMirrorGroup
 from .util import packagequery, cpio
 from . import conf
 from . import oscerr
 import tempfile
 import re
 
 try:
     from .meter import TextMeter
-except:
+except ImportError:
     TextMeter = None
 
 
-def join_url(self, base_url, rel_url):
-    """to override _join_url of MirrorGroup, because we want to
-    pass full URLs instead of base URL where relative_url is added later...
-    IOW, we make MirrorGroup ignore relative_url
-    """
-    return base_url
-
-
-class OscFileGrabber(URLGrabber):
-    def __init__(self, progress_obj=None):
-        # we cannot use super because we still have to support
-        # older urlgrabber versions where URLGrabber is an old-style class
-        URLGrabber.__init__(self)
-        self.progress_obj = progress_obj
-
-    def urlgrab(self, url, filename, text=None, **kwargs):
-        if url.startswith('file://'):
-            f = url.replace('file://', '', 1)
-            if os.path.isfile(f):
-                return f
-            else:
-                raise URLGrabError(2, 'Local file \'%s\' does not exist' % f)
-        with file(filename, 'wb') as f:
-            try:
-                for i in streamfile(url, progress_obj=self.progress_obj,
-                                    text=text):
-                    f.write(i)
-            except HTTPError as e:
-                exc = URLGrabError(14, str(e))
-                exc.url = url
-                exc.exception = e
-                exc.code = e.code
-                raise exc
-            except IOError as e:
-                raise URLGrabError(4, str(e))
-        return filename
-
-
 class Fetcher:
     def __init__(self, cachedir='/tmp', api_host_options={}, urllist=[],
                  http_debug=False, cookiejar=None, offline=False, enable_cpio=True):
         # set up progress bar callback
         if sys.stdout.isatty() and TextMeter:
-            self.progress_obj = TextMeter(fo=sys.stdout)
+            self.progress_obj = TextMeter()
         else:
             self.progress_obj = None
@@ -92,14 +54,6 @@ class Fetcher:
             openers += (HTTPCookieProcessor(cookiejar), )
         self.gr = OscFileGrabber(progress_obj=self.progress_obj)
 
-    def failureReport(self, errobj):
-        """failure output for failovers from urlgrabber"""
-        if errobj.url.startswith('file://'):
-            return {}
-        print('%s/%s: attempting download from api, since not found at %s'
-              % (self.curpac.project, self.curpac, errobj.url.split('/')[2]))
-        return {}
-
     def __add_cpio(self, pac):
         prpap = '%s/%s/%s/%s' % (pac.project, pac.repository, pac.repoarch, pac.repopackage)
         self.cpio.setdefault(prpap, {})[pac.repofilename] = pac
@@ -156,8 +110,8 @@ class Fetcher:
                     raise oscerr.APIError('failed to fetch file \'%s\': '
                                           'missing in CPIO archive' %
                                           pac.repofilename)
-            except URLGrabError as e:
-                if e.errno != 14 or e.code != 414:
+            except HTTPError as e:
+                if e.code != 414:
                     raise
                 # query str was too large
                 keys = list(pkgs.keys())
@@ -181,8 +135,7 @@ class Fetcher:
         # for use by the failure callback
         self.curpac = pac
 
-        MirrorGroup._join_url = join_url
-        mg = MirrorGroup(self.gr, pac.urllist, failure_callback=(self.failureReport, (), {}))
+        mg = OscMirrorGroup(self.gr, pac.urllist)
 
         if self.http_debug:
             print('\nURLs to try for package \'%s\':' % pac, file=sys.stderr)
@@ -192,16 +145,19 @@ class Fetcher:
         try:
             with tempfile.NamedTemporaryFile(prefix='osc_build',
                                              delete=False) as tmpfile:
-                mg.urlgrab(pac.filename, filename=tmpfile.name,
-                           text='%s(%s) %s' % (prefix, pac.project, pac.filename))
-                self.move_package(tmpfile.name, pac.localdir, pac)
-        except URLGrabError as e:
-            if self.enable_cpio and e.errno == 256:
-                self.__add_cpio(pac)
-                return
-            print()
-            print('Error:', e.strerror, file=sys.stderr)
-            print('Failed to retrieve %s from the following locations '
-                  '(in order):' % pac.filename, file=sys.stderr)
-            print('\n'.join(pac.urllist), file=sys.stderr)
-            sys.exit(1)
+                mg_stat = mg.urlgrab(pac.filename, filename=tmpfile.name,
+                                     text='%s(%s) %s' % (prefix, pac.project, pac.filename))
+                if mg_stat:
+                    self.move_package(tmpfile.name, pac.localdir, pac)
+
+            if not mg_stat:
+                if self.enable_cpio:
+                    print('%s/%s: attempting download from api, since not found'
+                          % (pac.project, pac.name))
+                    self.__add_cpio(pac)
+                    return
+                print()
+                print('Error: Failed to retrieve %s from the following locations '
+                      '(in order):' % pac.filename, file=sys.stderr)
+                print('\n'.join(pac.urllist), file=sys.stderr)
+                sys.exit(1)
@@ -285,12 +241,12 @@ class Fetcher:
                 continue
 
             try:
                 # if there isn't a progress bar, there is no output at all
+                prefix = ''
                 if not self.progress_obj:
                     print('%d/%d (%s) %s' % (done, needed, i.project, i.filename))
-                self.fetch(i)
-                if self.progress_obj:
-                    print(" %d/%d\r" % (done, needed), end=' ')
-                    sys.stdout.flush()
+                else:
+                    prefix = '[%d/%d] ' % (done, needed)
+                self.fetch(i, prefix=prefix)
             except KeyboardInterrupt:
                 print('Cancelled by user (ctrl-c)')
@@ -308,10 +264,11 @@ class Fetcher:
                 dest += '/_pubkey'
 
             url = makeurl(buildinfo.apiurl, ['source', i, '_pubkey'])
+            try_parent = False
             try:
                 if self.offline and not os.path.exists(dest):
                     # may need to try parent
-                    raise URLGrabError(2)
+                    try_parent = True
                 elif not self.offline:
                     OscFileGrabber().urlgrab(url, dest)
                 # not that many keys usually
@@ -324,12 +281,14 @@ class Fetcher:
                     if os.path.exists(dest):
                         os.unlink(dest)
                     sys.exit(0)
-            except URLGrabError as e:
+            except HTTPError as e:
                 # Not found is okay, let's go to the next project
-                if e.errno == 14 and e.code != 404:
+                if e.code != 404:
                     print("Invalid answer from server", e, file=sys.stderr)
                     sys.exit(1)
+                try_parent = True
+
+            if try_parent:
                 if self.http_debug:
                     print("can't fetch key for %s: %s" % (i, e.strerror), file=sys.stderr)
                     print("url: %s" % url, file=sys.stderr)

osc/grabber.py (new file)

@@ -0,0 +1,49 @@
+# Copyright (C) 2018 SUSE Linux.  All rights reserved.
+# This program is free software; it may be used, copied, modified
+# and distributed under the terms of the GNU General Public Licence,
+# either version 2, or (at your option) any later version.
+
+import sys
+import os.path
+
+from .core import streamfile
+
+try:
+    from urllib.request import HTTPError
+    from urllib.parse import urlparse
+    from urllib.parse import unquote
+except ImportError:
+    from urllib2 import HTTPError
+    from urlparse import urlparse
+    from urllib import unquote
+
+
+class OscFileGrabber(object):
+    def __init__(self, progress_obj=None):
+        self.progress_obj = progress_obj
+
+    def urlgrab(self, url, filename=None, text=None):
+        if filename is None:
+            parts = urlparse(url)
+            filename = os.path.basename(unquote(parts[2]))
+        with open(filename, 'wb') as f:
+            for i in streamfile(url, progress_obj=self.progress_obj,
+                                text=text):
+                f.write(i)
+
+
+class OscMirrorGroup(object):
+    def __init__(self, grabber, mirrors):
+        self._grabber = grabber
+        self._mirrors = mirrors
+
+    def urlgrab(self, url, filename=None, text=None):
+        tries = 0
+        for mirror in self._mirrors:
+            try:
+                self._grabber.urlgrab(mirror, filename, text)
+                return True
+            except HTTPError as e:
+                print('Error %s' % e.code)
+                tries += 1
+        return False
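
Unlike urlgrabber's MirrorGroup, OscMirrorGroup does not raise when all mirrors fail: it prints the HTTP error code per mirror, moves on to the next URL, and reports the overall result as a boolean, which is what the mg_stat check in osc/fetch.py relies on. A usage sketch with placeholder URLs:

    from osc.grabber import OscFileGrabber, OscMirrorGroup

    mirrors = [
        'https://mirror1.example.org/repo/x86_64/foo.rpm',
        'https://mirror2.example.org/repo/x86_64/foo.rpm',
    ]
    mg = OscMirrorGroup(OscFileGrabber(), mirrors)
    # The first positional argument is kept for API symmetry; each mirror
    # entry is already a full URL, so that is what actually gets fetched.
    ok = mg.urlgrab('foo.rpm', filename='/tmp/foo.rpm', text='foo.rpm')
    if not ok:
        print('all mirrors failed')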

osc/meter.py

@@ -1,103 +1,27 @@
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the
-#      Free Software Foundation, Inc.,
-#      59 Temple Place, Suite 330,
-#      Boston, MA  02111-1307  USA
-
-# this is basically a copy of python-urlgrabber's TextMeter class,
-# with support added for dynamical sizing according to screen size.
-# it uses getScreenWidth() scrapped from smart.
-# 2007-04-24, poeml
-
-from __future__ import print_function
-
-from urlgrabber.progress import BaseMeter, format_time, format_number
-import sys, os
-
-
-def getScreenWidth():
-    import termios, struct, fcntl
-    s = struct.pack('HHHH', 0, 0, 0, 0)
-    try:
-        x = fcntl.ioctl(1, termios.TIOCGWINSZ, s)
-    except IOError:
-        return 80
-    return struct.unpack('HHHH', x)[1]
-
-
-class TextMeter(BaseMeter):
-    def __init__(self, fo=sys.stderr, hide_finished=False):
-        BaseMeter.__init__(self)
-        self.fo = fo
-        self.hide_finished = hide_finished
-        try:
-            width = int(os.environ['COLUMNS'])
-        except (KeyError, ValueError):
-            width = getScreenWidth()
-
-        #self.unsized_templ = '\r%-60.60s    %5sB %s '
-        self.unsized_templ = '\r%%-%s.%ss    %%5sB %%s ' % (width *2/5, width*3/5)
-        #self.sized_templ = '\r%-45.45s %3i%% |%-15.15s| %5sB %8s '
-        self.bar_length = width/5
-        self.sized_templ = '\r%%-%s.%ss %%3i%%%% |%%-%s.%ss| %%5sB %%8s ' % (width*4/10, width*4/10, self.bar_length, self.bar_length)
-
-    def _do_start(self, *args, **kwargs):
-        BaseMeter._do_start(self, *args, **kwargs)
-        self._do_update(0)
-
-    def _do_update(self, amount_read, now=None):
-        etime = self.re.elapsed_time()
-        fetime = format_time(etime)
-        fread = format_number(amount_read)
-        #self.size = None
-        if self.text is not None:
-            text = self.text
-        else:
-            text = self.basename
-        if self.size is None:
-            out = self.unsized_templ % \
-                (text, fread, fetime)
-        else:
-            rtime = self.re.remaining_time()
-            frtime = format_time(rtime)
-            frac = self.re.fraction_read()
-            bar = '='*int(self.bar_length * frac)
-            out = self.sized_templ % \
-                (text, frac*100, bar, fread, frtime) + 'ETA '
-        self.fo.write(out)
-        self.fo.flush()
-
-    def _do_end(self, amount_read, now=None):
-        total_time = format_time(self.re.elapsed_time())
-        total_size = format_number(amount_read)
-        if self.text is not None:
-            text = self.text
-        else:
-            text = self.basename
-        if self.size is None:
-            out = self.unsized_templ % \
-                (text, total_size, total_time)
-        else:
-            bar = '=' * self.bar_length
-            out = self.sized_templ % \
-                (text, 100, bar, total_size, total_time) + ' '
-        if self.hide_finished:
-            self.fo.write('\r'+ ' '*len(out) + '\r')
-        else:
-            self.fo.write(out + '\n')
-        self.fo.flush()
-
+# Copyright (C) 2018 SUSE Linux.  All rights reserved.
+# This program is free software; it may be used, copied, modified
+# and distributed under the terms of the GNU General Public Licence,
+# either version 2, or (at your option) any later version.
+
+import progressbar as pb
+
+
+class TextMeter(object):
+    def start(self, basename, size=None):
+        if size is None:
+            widgets = [basename + ': ', pb.AnimatedMarker(), ' ', pb.Timer()]
+            self.bar = pb.ProgressBar(widgets=widgets, maxval=pb.UnknownLength)
+        else:
+            widgets = [basename + ': ', pb.Percentage(), pb.Bar(), ' ',
+                       pb.ETA()]
+            self.bar = pb.ProgressBar(widgets=widgets, maxval=size)
+        self.bar.start()
+
+    def update(self, amount_read):
+        self.bar.update(amount_read)
+
+    def end(self):
+        self.bar.finish()
+
 # vim: sw=4 et
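
A small sketch of driving the new TextMeter by hand (sizes are made up, and python-progressbar must be installed): with a known size it renders percentage, bar and ETA; with size=None it falls back to an animated marker plus a timer.

    import time

    from osc.meter import TextMeter

    meter = TextMeter()
    meter.start('foo.rpm', size=1024 * 1024)  # known size -> percentage/bar/ETA
    for done in range(0, 1024 * 1024 + 1, 256 * 1024):
        meter.update(done)
        time.sleep(0.05)
    meter.end()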