
Merge branch 'master' into python3_fetch_module

Marco Strigl 2019-04-07 10:25:50 -05:00 committed by GitHub
commit b71deaa537
12 changed files with 395 additions and 260 deletions

NEWS

@ -1,5 +1,32 @@
0.165
-
0.164.2
- deleterequest for entire projects needs the --all option as additional protection
- rewrite packagequery to support python3
- rewrite oscerr module to support python3
- rewrite archquery and debquery to support python3
- Export vc env vars when running a source service
0.164.1
- rewrite cpio handling to support python3
- rewrite ar module to support python3
- enable fetch module to support python3
- rework progressbar handling (if module is not present)
- improve os_path_samefile in core.py
0.164
- add support for approved requests (requires OBS 2.10)
- fix various multibuild problems
- improved and fixed various help texts
- check constraints without local checkout
- check out deleted sources (osc co -D)
- replace urlgrabber module with own module
- use progressbar module instead of urlgrabber to draw
progress bars
- show buildinfo for alternative projects (--alternative-project)
- run release job immediately (osc release --no-delay)
- build results on project level can now be watched (osc prjresults --watch)
0.163
- add sendsysrq command (requires OBS 2.10)

(next changed file)

@ -33,10 +33,7 @@ except ImportError:
from .conf import config, cookiejar
try:
from .meter import TextMeter
except:
TextMeter = None
from .meter import create_text_meter
change_personality = {
'i686': 'linux32',
@ -308,10 +305,9 @@ def get_preinstall_image(apiurl, arch, cache_dir, img_info):
print('packagecachedir is not writable for you?', file=sys.stderr)
print(e, file=sys.stderr)
sys.exit(1)
if sys.stdout.isatty() and TextMeter:
progress_obj = TextMeter()
else:
progress_obj = None
progress_obj = None
if sys.stdout.isatty():
progress_obj = create_text_meter(use_pb_fallback=False)
gr = OscFileGrabber(progress_obj=progress_obj)
try:
gr.urlgrab(url, filename=ifile_path_part, text='fetching image')
@ -502,6 +498,22 @@ def check_trusted_projects(apiurl, projects):
config['api_host_options'][apiurl]['trusted_prj'] = trusted
conf.config_set_option(apiurl, 'trusted_prj', ' '.join(trusted))
def get_kiwipath_from_buildinfo(apiurl, bi_filename, prj, repo):
bi = Buildinfo(bi_filename, apiurl, 'kiwi')
# If the project does not have a path defined we need to get the config
# via the repositories in the kiwi file. Unfortunately the buildinfo
# does not include a hint if this is the case, so we rely on a heuristic
# here: if the path list contains our own repo, it probably does not
# come from the kiwi file and thus a path is defined in the config.
# It is unlikely that our own repo is included in the kiwi file, as it
# contains no packages.
myprp = prj + '/' + repo
if myprp in bi.pathes:
return None
kiwipath = bi.pathes
kiwipath.insert(0, myprp)
return kiwipath
def main(apiurl, opts, argv):
repo = argv[0]
@ -783,8 +795,11 @@ def main(apiurl, opts, argv):
# maybe we should check for errors before saving the file
bi_file.write(bi_text)
bi_file.flush()
kiwipath = None
if build_type == 'kiwi':
kiwipath = get_kiwipath_from_buildinfo(apiurl, bi_filename, prj, repo)
print('Getting buildconfig from server and store to %s' % bc_filename)
bc = get_buildconfig(apiurl, prj, repo)
bc = get_buildconfig(apiurl, prj, repo, kiwipath)
if not bc_file:
bc_file = open(bc_filename, 'w')
bc_file.write(bc)
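
The heuristic above can be illustrated with plain data. A minimal sketch, using made-up project and repository names and no server access, mirroring get_kiwipath_from_buildinfo:

# Made-up values. If our own prj/repo pair already appears in the buildinfo
# path list, the project config defines the path and no explicit kiwipath is
# sent (None); otherwise the repos come from the kiwi file and our own
# prj/repo is prepended.
bi_pathes = ['openSUSE:Factory/standard', 'devel:languages:python/openSUSE_Factory']
prj, repo = 'home:user:images', 'images'

myprp = prj + '/' + repo
kiwipath = None if myprp in bi_pathes else [myprp] + bi_pathes
print(kiwipath)
# ['home:user:images/images', 'openSUSE:Factory/standard',
#  'devel:languages:python/openSUSE_Factory']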

(next changed file)

@ -166,8 +166,8 @@ class Osc(cmdln.Cmdln):
self.options.verbose = conf.config['verbose']
self.download_progress = None
if conf.config.get('show_download_progress', False):
from .meter import TextMeter
self.download_progress = TextMeter()
from .meter import create_text_meter
self.download_progress = create_text_meter()
def get_cmd_help(self, cmdname):
@ -1939,6 +1939,8 @@ Please submit there instead, or use --nodevelproject to force direct submission.
help='specify message TEXT')
@cmdln.option('-r', '--repository', metavar='REPOSITORY',
help='specify repository')
@cmdln.option('--all', action='store_true',
help='deletes entire project with packages inside')
@cmdln.option('--accept-in-hours', metavar='HOURS',
help='specify time when request shall get accepted automatically. Only works with write permissions in target.')
@cmdln.alias("dr")
@ -1950,8 +1952,8 @@ Please submit there instead, or use --nodevelproject to force direct submission.
usage:
osc deletereq [-m TEXT] # works in checked out project/package
osc deletereq [-m TEXT] PROJECT [PACKAGE]
osc deletereq [-m TEXT] PROJECT [--repository REPOSITORY]
osc deletereq [-m TEXT] PROJECT PACKAGE
osc deletereq [-m TEXT] PROJECT [--all|--repository REPOSITORY]
${cmd_option_list}
"""
import cgi
@ -1977,6 +1979,9 @@ Please submit there instead, or use --nodevelproject to force direct submission.
else:
raise oscerr.WrongArgs('Please specify at least a project.')
if not opts.all and package is None:
raise oscerr.WrongOptions('No package name has been provided. Use --all option, if you want to request to delete the entire project.')
if opts.repository:
repository = opts.repository
@ -5203,8 +5208,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
opts.vertical = None
opts.show_non_building = None
opts.show_excluded = None
self.do_prjresults('prjresults', opts, *args)
return
return self.do_prjresults('prjresults', opts, *args)
if opts.xml and opts.csv:
raise oscerr.WrongOptions("--xml and --csv are mutual exclusive")
@ -6011,7 +6015,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
if (arg == osc.build.hostarch or arg in all_archs) and arg_arch is None:
# it seems to be an architecture in general
arg_arch = arg
if not (arg in osc.build.can_also_build.get(osc.build.hostarch) or arg == osc.build.hostarch):
if not (arg == osc.build.hostarch or arg in osc.build.can_also_build.get(osc.build.hostarch, [])):
print("WARNING: native compile is not possible, an emulator must be configured!")
elif not arg_repository:
arg_repository = arg
@ -6287,14 +6291,16 @@ Please submit there instead, or use --nodevelproject to force direct submission.
args = self.parse_repoarchdescr(args, opts.noinit or opts.offline, opts.alternative_project, False, opts.vm_type, opts.multibuild_package)
# check for source services
r = None
try:
if not opts.offline and not opts.noservice:
p = Package('.')
r = p.run_source_services(verbose=True)
except:
print("WARNING: package is not existing on server yet")
opts.local_package = True
if not opts.offline and not opts.noservice:
p = Package('.')
r = p.run_source_services(verbose=True)
if r:
print('Source service run failed!', file=sys.stderr)
sys.exit(1)
else:
msg = ('WARNING: source services from package or project will not '
       'be executed. This may not be the same build as on server!')
print(msg)
if not opts.local_package:
try:
@ -6307,15 +6313,6 @@ Please submit there instead, or use --nodevelproject to force direct submission.
except oscerr.NoWorkingCopy:
pass
if opts.offline or opts.local_package or r == None:
print("WARNING: source service from package or project will not be executed. This may not be the same build as on server!")
elif (conf.config['local_service_run'] and not opts.noservice) and not opts.noinit:
if r != 0:
print('Source service run failed!', file=sys.stderr)
sys.exit(1)
# that is currently unreadable on cli, we should not have a backtrace on standard errors:
#raise oscerr.ServiceRuntimeError('Service run failed: \'%s\'', r)
if conf.config['no_verify']:
opts.no_verify = True
@ -6335,7 +6332,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
if opts.preload:
opts.nopreinstallimage = True
print('Building %s for %s/%s' % (args[2], args[0], args[1]))
if not opts.host:
return osc.build.main(self.get_api_url(), opts, args)
@ -8850,37 +8847,6 @@ Please submit there instead, or use --nodevelproject to force direct submission.
else:
apiurl = self.get_api_url()
# try to set the env variables for the user's realname and email
# (the variables are used by the "vc" script)
tag2envs = {'realname': ['VC_REALNAME'],
'email': ['VC_MAILADDR', 'mailaddr']}
tag2val = {}
missing_tags = []
for (tag, envs) in tag2envs.items():
env_present = [env for env in envs if env in os.environ]
config_present = tag in conf.config['api_host_options'][apiurl]
if not env_present and not config_present:
missing_tags.append(tag)
elif config_present:
tag2val[tag] = conf.config['api_host_options'][apiurl][tag]
if missing_tags:
user = conf.get_apiurl_usr(apiurl)
data = get_user_data(apiurl, user, *missing_tags)
if data is not None:
for tag in missing_tags:
val = data.pop(0)
if val != '-':
tag2val[tag] = val
else:
msg = 'Try env %s=...' % tag2envs[tag][0]
print(msg, file=sys.stderr)
for (tag, val) in tag2val.items():
for env in tag2envs[tag]:
os.environ[env] = val
if meego_style:
if opts.message or opts.just_edit:
print('Warning: to edit MeeGo style changelog, opts will be ignored.', file=sys.stderr)
@ -8899,6 +8865,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
cmd_list.extend(args)
vc_export_env(apiurl)
vc = Popen(cmd_list)
vc.wait()
sys.exit(vc.returncode)

(next changed file)

@ -5,7 +5,7 @@
from __future__ import print_function
__version__ = '0.164.git'
__version__ = '0.165.git'
# __store_version__ is to be incremented when the format of the working copy
# "store" changes in an incompatible way. Please add any needed migration
@ -247,7 +247,7 @@ buildstatus_symbols = {'succeeded': '.',
def os_path_samefile(path1, path2):
try:
return os.path.samefile(path1, path2)
except:
except AttributeError:
return os.path.realpath(path1) == os.path.realpath(path2)
class File:
@ -406,6 +406,12 @@ class Serviceinfo:
data = { 'name' : singleservice, 'command' : [ singleservice ], 'mode' : '' }
allservices = [data]
if not allservices:
# short-circuit to avoid a potential http request in vc_export_env
# (if there are no services to execute this http request is
# useless)
return 0
# services can detect that they run via osc this way
os.putenv("OSC_VERSION", get_osc_version())
@ -415,6 +421,8 @@ class Serviceinfo:
os.putenv("OBS_SERVICE_APIURL", self.apiurl)
os.putenv("OBS_SERVICE_PROJECT", self.project)
os.putenv("OBS_SERVICE_PACKAGE", self.package)
# also export vc env vars (some services (like obs_scm) use them)
vc_export_env(self.apiurl)
# recreate files
ret = 0
@ -2971,7 +2979,7 @@ class Request:
lines.append(' *** This request will get automatically accepted after '+self.accept_at+' ! ***\n')
if self.priority in [ 'critical', 'important' ] and self.state.name in [ 'new', 'review' ]:
lines.append(' *** This request has classified as '+self.priority+' ! ***\n')
if self.state.approver and self.state.name == 'review':
if self.state and self.state.approver and self.state.name == 'review':
lines.append(' *** This request got approved by '+self.state.approver+'. It will get automatically accepted after last review got accepted! ***\n')
for action in self.actions:
@ -4617,8 +4625,8 @@ def get_binary_file(apiurl, prj, repo, arch,
progress_meter = False):
progress_obj = None
if progress_meter:
from .meter import TextMeter
progress_obj = TextMeter()
from .meter import create_text_meter
progress_obj = create_text_meter()
target_filename = target_filename or filename
@ -6097,8 +6105,12 @@ def get_buildinfo(apiurl, prj, package, repository, arch, specfile=None, addlist
return f.read()
def get_buildconfig(apiurl, prj, repository):
u = makeurl(apiurl, ['build', prj, repository, '_buildconfig'])
def get_buildconfig(apiurl, prj, repository, path=None):
query = []
if path:
for prp in path:
query.append('path=%s' % quote_plus(prp))
u = makeurl(apiurl, ['build', prj, repository, '_buildconfig'], query=query)
f = http_GET(u)
return f.read()
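
For reference, a path list passed to get_buildconfig ends up as repeated, percent-encoded path= query parameters on the _buildconfig request. A minimal sketch of that query construction (project and repository names are made up):

from urllib.parse import quote_plus

path = ['home:user:images/images', 'openSUSE:Factory/standard']
query = ['path=%s' % quote_plus(prp) for prp in path]
print('&'.join(query))
# path=home%3Auser%3Aimages%2Fimages&path=openSUSE%3AFactory%2Fstandard
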
@ -7786,4 +7798,37 @@ def checkout_deleted_package(apiurl, proj, pkg, dst):
f.write(data)
print('done.')
def vc_export_env(apiurl, quiet=False):
# try to set the env variables for the user's realname and email
# (the variables are used by the "vc" script or some source service)
tag2envs = {'realname': ['VC_REALNAME'],
'email': ['VC_MAILADDR', 'mailaddr']}
tag2val = {}
missing_tags = []
for (tag, envs) in tag2envs.items():
env_present = [env for env in envs if env in os.environ]
config_present = tag in conf.config['api_host_options'][apiurl]
if not env_present and not config_present:
missing_tags.append(tag)
elif config_present:
tag2val[tag] = conf.config['api_host_options'][apiurl][tag]
if missing_tags:
user = conf.get_apiurl_usr(apiurl)
data = get_user_data(apiurl, user, *missing_tags)
if data is not None:
for tag in missing_tags:
val = data.pop(0)
if val != '-':
tag2val[tag] = val
elif not quiet:
msg = 'Try env %s=...' % tag2envs[tag][0]
print(msg, file=sys.stderr)
for (tag, val) in tag2val.items():
for env in tag2envs[tag]:
os.environ[env] = val
# vim: sw=4 et
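
A minimal usage sketch for vc_export_env, assuming osc is installed and an API is configured in oscrc (it may contact the API to look up missing realname/email data):

import os

from osc import conf
from osc.core import vc_export_env

conf.get_config()               # read the oscrc configuration
apiurl = conf.config['apiurl']  # the default API configured there
vc_export_env(apiurl)
# tools spawned from here on (e.g. /usr/lib/build/vc) see these variables
print(os.environ.get('VC_REALNAME'), os.environ.get('VC_MAILADDR'))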

(next changed file)

@ -3,18 +3,25 @@
# and distributed under the terms of the GNU General Public Licence,
# either version 2, or (at your option) any later version.
import progressbar as pb
try:
import progressbar as pb
have_pb_module = True
except ImportError:
have_pb_module = False
class TextMeter(object):
class PBTextMeter(object):
def start(self, basename, size=None):
if size is None:
widgets = [basename + ': ', pb.AnimatedMarker(), ' ', pb.Timer()]
self.bar = pb.ProgressBar(widgets=widgets, maxval=pb.UnknownLength)
else:
widgets = [basename + ': ', pb.Percentage(), pb.Bar(), ' ',
pb.ETA()]
widgets = [basename + ': ', pb.Bar(), ' ', pb.ETA()]
if size:
# if size is 0, using pb.Percentage will result in
# a ZeroDivisionError
widgets.insert(1, pb.Percentage())
self.bar = pb.ProgressBar(widgets=widgets, maxval=size)
self.bar.start()
@ -24,4 +31,31 @@ class TextMeter(object):
def end(self):
self.bar.finish()
class NoPBTextMeter(object):
_complained = False
def start(self, basename, size=None):
if not self._complained:
print('Please install the progressbar module')
NoPBTextMeter._complained = True
print('Processing: %s' % basename)
def update(self, *args, **kwargs):
pass
def end(self, *args, **kwargs):
pass
def create_text_meter(*args, **kwargs):
use_pb_fallback = kwargs.pop('use_pb_fallback', True)
if have_pb_module or use_pb_fallback:
return TextMeter(*args, **kwargs)
return None
if have_pb_module:
TextMeter = PBTextMeter
else:
TextMeter = NoPBTextMeter
# vim: sw=4 et
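
A usage sketch for the factory above, assuming osc.meter is importable and the meter classes keep their start/update/end interface:

from osc.meter import create_text_meter

# PBTextMeter when the progressbar module is available, NoPBTextMeter as a
# fallback, or None when use_pb_fallback=False and progressbar is missing.
meter = create_text_meter(use_pb_fallback=False)
if meter is not None:
    meter.start('example.tar.xz', size=1024)
    meter.update(512)   # bytes processed so far
    meter.end()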

(next changed file)

@ -82,7 +82,7 @@ class WorkingCopyOutdated(OscBaseError):
def __str__(self):
return ('Working copy \'%s\' is out of date (rev %s vs rev %s).\n'
'Looks as if you need to update it first.' \
% (self[0], self[1], self[2]))
% (self.args[0], self.args[1], self.args[2]))
class PackageError(OscBaseError):
"""Base class for all Package related exceptions"""

(next changed file)

@ -20,12 +20,8 @@ import re
import sys
import stat
#XXX: python 2.7 contains io.StringIO, which needs unicode instead of str
#therefor try to import old stuff before new one here
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
from io import BytesIO
# workaround for python24
if not hasattr(os, 'SEEK_SET'):
@ -48,6 +44,9 @@ class ArHdr:
self.date = date.strip()
self.uid = uid.strip()
self.gid = gid.strip()
if not mode.strip():
# provide a dummy mode for the ext_fn hdr
mode = '0'
self.mode = stat.S_IMODE(int(mode, 8))
self.size = int(size)
self.fmag = fmag
@ -57,10 +56,10 @@ class ArHdr:
def __str__(self):
return '%16s %d' % (self.file, self.size)
class ArFile(StringIO):
class ArFile(BytesIO):
"""Represents a file which resides in the archive"""
def __init__(self, fn, uid, gid, mode, buf):
StringIO.__init__(self, buf)
BytesIO.__init__(self, buf)
self.name = fn
self.uid = uid
self.gid = gid
@ -75,9 +74,8 @@ class ArFile(StringIO):
if not dir:
dir = os.getcwd()
fn = os.path.join(dir, self.name)
f = open(fn, 'wb')
f.write(self.getvalue())
f.close()
with open(fn, 'wb') as f:
f.write(self.getvalue())
os.chmod(fn, self.mode)
uid = self.uid
if uid != os.geteuid() or os.geteuid() != 0:
@ -97,11 +95,12 @@ class Ar:
Readonly access.
"""
hdr_len = 60
hdr_pat = re.compile('^(.{16})(.{12})(.{6})(.{6})(.{8})(.{10})(.{2})', re.DOTALL)
hdr_pat = re.compile(b'^(.{16})(.{12})(.{6})(.{6})(.{8})(.{10})(.{2})',
re.DOTALL)
def __init__(self, fn = None, fh = None):
if fn == None and fh == None:
raise ArError('either \'fn\' or \'fh\' must be != None')
raise ValueError('either \'fn\' or \'fh\' must be != None')
if fh != None:
self.__file = fh
self.__closefile = False
@ -123,7 +122,7 @@ class Ar:
def _appendHdr(self, hdr):
# GNU uses an internal '//' file to store very long filenames
if hdr.file.startswith('//'):
if hdr.file.startswith(b'//'):
self.ext_fnhdr = hdr
else:
self.hdrs.append(hdr)
@ -137,11 +136,11 @@ class Ar:
Another special file is the '/' which contains the symbol lookup table.
"""
for h in self.hdrs:
if h.file == '/':
if h.file == b'/':
continue
# remove slashes which are appended by ar
h.file = h.file.rstrip('/')
if not h.file.startswith('/'):
h.file = h.file.rstrip(b'/')
if not h.file.startswith(b'/'):
continue
# handle long filename
off = int(h.file[1:len(h.file)])
@ -150,11 +149,11 @@ class Ar:
# XXX: is it safe to read all the data in one chunk? I assume the '//' data section
# won't be too large
data = self.__file.read(self.ext_fnhdr.size)
end = data.find('/')
end = data.find(b'/')
if end != -1:
h.file = data[0:end]
else:
raise ArError('//', 'invalid data section - trailing slash (off: %d)' % start)
raise ArError(b'//', 'invalid data section - trailing slash (off: %d)' % start)
def _get_file(self, hdr):
self.__file.seek(hdr.dataoff, os.SEEK_SET)
@ -162,25 +161,14 @@ class Ar:
self.__file.read(hdr.size))
def read(self):
"""reads in the archive. It tries to use mmap due to performance reasons (in case of large files)"""
"""reads in the archive."""
if not self.__file:
import mmap
self.__file = open(self.filename, 'rb')
try:
if sys.platform[:3] != 'win':
self.__file = mmap.mmap(self.__file.fileno(), os.path.getsize(self.__file.name), prot=mmap.PROT_READ)
else:
self.__file = mmap.mmap(self.__file.fileno(), os.path.getsize(self.__file.name))
except EnvironmentError as e:
if e.errno == 19 or ( hasattr(e, 'winerror') and e.winerror == 5 ):
print('cannot use mmap to read the file, falling back to the default io', file=sys.stderr)
else:
raise e
else:
self.__file.seek(0, os.SEEK_SET)
self._init_datastructs()
data = self.__file.read(7)
if data != '!<arch>':
if data != b'!<arch>':
raise ArError(self.filename, 'no ar archive')
pos = 8
while (len(data) != 0):
@ -208,7 +196,19 @@ class Ar:
def __iter__(self):
for h in self.hdrs:
if h.file == '/':
if h.file == b'/':
continue
yield self._get_file(h)
raise StopIteration()
if __name__ == '__main__':
if len(sys.argv) != 2:
print('usage: %s <arfile>' % sys.argv[0])
sys.exit(1)
# a potential user might want to pass a bytes instead of a str
# to make sure that the ArError's file attribute is always a
# bytes
ar = Ar(fn=sys.argv[1])
ar.read()
for hdr in ar.hdrs:
print(hdr)
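
A short sketch of reading an archive with the bytes-based Ar class above; the file path is made up, and member names are bytes now:

from osc.util.ar import Ar

archive = Ar(fn='/tmp/example.deb')   # any ar archive, e.g. a .deb
archive.read()
control = archive.get_file(b'control.tar.gz')   # an ArFile (BytesIO) or None
if control is not None:
    print(len(control.getvalue()), 'bytes')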

(next changed file)

@ -17,7 +17,7 @@ class ArchQuery(packagequery.PackageQuery, packagequery.PackageQueryResult):
self.fields = {}
#self.magic = None
#self.pkgsuffix = 'pkg.tar.gz'
self.pkgsuffix = 'arch'
self.pkgsuffix = b'arch'
def read(self, all_tags=True, self_provides=True, *extra_tags):
# all_tags and *extra_tags are currently ignored
@ -28,22 +28,21 @@ class ArchQuery(packagequery.PackageQuery, packagequery.PackageQueryResult):
fn = open('/dev/null', 'wb')
pipe = subprocess.Popen(['tar', '-O', '-xf', self.__path, '.PKGINFO'], stdout=subprocess.PIPE, stderr=fn).stdout
for line in pipe.readlines():
line = line.rstrip().split(' = ', 2)
line = line.rstrip().split(b' = ', 2)
if len(line) == 2:
if not line[0] in self.fields:
self.fields[line[0]] = []
self.fields[line[0]].append(line[1])
field, value = line[0].decode('ascii'), line[1]
self.fields.setdefault(field, []).append(value)
if self_provides:
prv = '%s = %s' % (self.name(), self.fields['pkgver'][0])
prv = b'%s = %s' % (self.name(), self.fields['pkgver'][0])
self.fields.setdefault('provides', []).append(prv)
return self
def vercmp(self, archq):
res = cmp(int(self.epoch()), int(archq.epoch()))
res = packagequery.cmp(int(self.epoch()), int(archq.epoch()))
if res != 0:
return res
res = ArchQuery.rpmvercmp(self.version(), archq.version())
if res != None:
if res != 0:
return res
res = ArchQuery.rpmvercmp(self.release(), archq.release())
return res
@ -54,25 +53,31 @@ class ArchQuery(packagequery.PackageQuery, packagequery.PackageQueryResult):
def version(self):
pkgver = self.fields['pkgver'][0] if 'pkgver' in self.fields else None
if pkgver != None:
pkgver = re.sub(r'[0-9]+:', '', pkgver, 1)
pkgver = re.sub(r'-[^-]*$', '', pkgver)
pkgver = re.sub(br'[0-9]+:', b'', pkgver, 1)
pkgver = re.sub(br'-[^-]*$', b'', pkgver)
return pkgver
def release(self):
pkgver = self.fields['pkgver'][0] if 'pkgver' in self.fields else None
if pkgver != None:
m = re.search(r'-([^-])*$', pkgver)
m = re.search(br'-([^-])*$', pkgver)
if m:
return m.group(1)
return None
def epoch(self):
pkgver = self.fields['pkgver'][0] if 'pkgver' in self.fields else None
if pkgver != None:
m = re.match(r'([0-9])+:', pkgver)
def _epoch(self):
pkgver = self.fields.get('pkgver', [b''])[0]
if pkgver:
m = re.match(br'([0-9])+:', pkgver)
if m:
return m.group(1)
return None
return b''
def epoch(self):
epoch = self._epoch()
if epoch:
return epoch
return b'0'
def arch(self):
return self.fields['arch'][0] if 'arch' in self.fields else None
@ -103,7 +108,7 @@ class ArchQuery(packagequery.PackageQuery, packagequery.PackageQueryResult):
# libsolv treats an optdepend as a "suggests", hence we do the same
if 'optdepend' not in self.fields:
return []
return [re.sub(':.*', '', entry) for entry in self.fields['optdepend']]
return [re.sub(b':.*', b'', entry) for entry in self.fields['optdepend']]
def supplements(self):
# a .PKGINFO has no notion of "recommends"
@ -114,8 +119,17 @@ class ArchQuery(packagequery.PackageQuery, packagequery.PackageQueryResult):
return []
def canonname(self):
pkgver = self.fields['pkgver'][0] if 'pkgver' in self.fields else None
return self.name() + '-' + pkgver + '-' + self.arch() + '.' + self.pkgsuffix
name = self.name()
if name is None:
raise ArchError(self.path(), 'package has no name')
version = self.version()
if version is None:
raise ArchError(self.path(), 'package has no version')
arch = self.arch()
if arch is None:
raise ArchError(self.path(), 'package has no arch')
return ArchQuery.filename(name, self._epoch(), version, self.release(),
arch)
def gettag(self, tag):
# implement me, if needed
@ -137,20 +151,24 @@ class ArchQuery(packagequery.PackageQuery, packagequery.PackageQueryResult):
"""
if ver1 == ver2:
return 0
elif ver1 is None:
return -1
elif ver2 is None:
return 1
res = 0
while res == 0:
# remove all leading non alphanumeric chars
ver1 = re.sub('^[^a-zA-Z0-9]*', '', ver1)
ver2 = re.sub('^[^a-zA-Z0-9]*', '', ver2)
ver1 = re.sub(b'^[^a-zA-Z0-9]*', b'', ver1)
ver2 = re.sub(b'^[^a-zA-Z0-9]*', b'', ver2)
if not (len(ver1) and len(ver2)):
break
# check if we have a digits segment
mo1 = re.match('(\d+)', ver1)
mo2 = re.match('(\d+)', ver2)
mo1 = re.match(b'(\d+)', ver1)
mo2 = re.match(b'(\d+)', ver2)
numeric = True
if mo1 is None:
mo1 = re.match('([a-zA-Z]+)', ver1)
mo2 = re.match('([a-zA-Z]+)', ver2)
mo1 = re.match(b'([a-zA-Z]+)', ver1)
mo2 = re.match(b'([a-zA-Z]+)', ver2)
numeric = False
# check for different types: alpha and numeric
if mo2 is None:
@ -163,43 +181,42 @@ class ArchQuery(packagequery.PackageQuery, packagequery.PackageQueryResult):
ver2 = ver2[mo2.end(1):]
if numeric:
# remove leading zeros
seg1 = re.sub('^0+', '', seg1)
seg2 = re.sub('^0+', '', seg2)
seg1 = re.sub(b'^0+', b'', seg1)
seg2 = re.sub(b'^0+', b'', seg2)
# longer digit segment wins - if both have the same length
# a simple ascii compare decides
res = len(seg1) - len(seg2) or cmp(seg1, seg2)
res = len(seg1) - len(seg2) or packagequery.cmp(seg1, seg2)
else:
res = cmp(seg1, seg2)
res = packagequery.cmp(seg1, seg2)
if res > 0:
return 1
elif res < 0:
return -1
return cmp(ver1, ver2)
return packagequery.cmp(ver1, ver2)
@staticmethod
def filename(name, epoch, version, release, arch):
if epoch:
if release:
return '%s-%s:%s-%s-%s.arch' % (name, epoch, version, release, arch)
return b'%s-%s:%s-%s-%s.arch' % (name, epoch, version, release, arch)
else:
return '%s-%s:%s-%s.arch' % (name, epoch, version, arch)
return b'%s-%s:%s-%s.arch' % (name, epoch, version, arch)
if release:
return '%s-%s-%s-%s.arch' % (name, version, release, arch)
return b'%s-%s-%s-%s.arch' % (name, version, release, arch)
else:
return '%s-%s-%s.arch' % (name, version, arch)
return b'%s-%s-%s.arch' % (name, version, arch)
if __name__ == '__main__':
import sys
archq = ArchQuery.query(sys.argv[1])
print(archq.name(), archq.version(), archq.release(), archq.arch())
try:
archq = ArchQuery.query(sys.argv[1])
print(archq.canonname())
except ArchError as e:
print(e.msg)
sys.exit(2)
print(archq.name(), archq.version(), archq.release(), archq.arch())
print(archq.canonname())
print(archq.description())
print('##########')
print('\n'.join(archq.provides()))
print(b'\n'.join(archq.provides()))
print('##########')
print('\n'.join(archq.requires()))
print(b'\n'.join(archq.requires()))
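
The bytes-based version comparison can be exercised directly. A small sketch, assuming rpmvercmp remains callable on the class:

from osc.util.archquery import ArchQuery

print(ArchQuery.rpmvercmp(b'1.0', b'1.0.1'))   # -1
print(ArchQuery.rpmvercmp(b'2.10', b'2.9'))    # 1 (digit segments compare numerically)
print(ArchQuery.rpmvercmp(b'1.0', b'1.0'))     # 0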

(next changed file)

@ -42,7 +42,7 @@ class CpioHdr:
"""
def __init__(self, mgc, ino, mode, uid, gid, nlink, mtime, filesize,
dev_maj, dev_min, rdev_maj, rdev_min, namesize, checksum,
off = -1, filename = ''):
off=-1, filename=b''):
"""
All passed parameters are hexadecimal strings (not NUL terminated) except
off and filename. They will be converted into normal ints.
@ -82,7 +82,7 @@ class CpioRead:
# supported formats - use name -> mgc mapping to increase readability
sfmt = {
'newascii': '070701',
'newascii': b'070701',
}
# header format
@ -124,11 +124,10 @@ class CpioRead:
if not stat.S_ISREG(stat.S_IFMT(hdr.mode)):
msg = '\'%s\' is no regular file - only regular files are supported atm' % hdr.filename
raise NotImplementedError(msg)
fn = os.path.join(dest, fn)
f = open(fn, 'wb')
self.__file.seek(hdr.dataoff, os.SEEK_SET)
f.write(self.__file.read(hdr.filesize))
f.close()
fn = os.path.join(dest, fn)
with open(fn, 'wb') as f:
f.write(self.__file.read(hdr.filesize))
os.chmod(fn, hdr.mode)
uid = hdr.uid
if uid != os.geteuid() or os.geteuid() != 1:
@ -147,16 +146,6 @@ class CpioRead:
def read(self):
if not self.__file:
self.__file = open(self.filename, 'rb')
try:
if sys.platform[:3] != 'win':
self.__file = mmap.mmap(self.__file.fileno(), os.path.getsize(self.__file.name), prot = mmap.PROT_READ)
else:
self.__file = mmap.mmap(self.__file.fileno(), os.path.getsize(self.__file.name))
except EnvironmentError as e:
if e.errno == 19 or ( hasattr(e, 'winerror') and e.winerror == 5 ):
print('cannot use mmap to read the file, failing back to default', file=sys.stderr)
else:
raise e
else:
self.__file.seek(0, os.SEEK_SET)
self._init_datastructs()
@ -174,7 +163,7 @@ class CpioRead:
data = struct.unpack(self.hdr_fmt, data)
hdr = CpioHdr(*data)
hdr.filename = self.__file.read(hdr.namesize - 1)
if hdr.filename == 'TRAILER!!!':
if hdr.filename == b'TRAILER!!!':
break
pos += hdr.namesize
if self._is_format('newascii'):
@ -210,47 +199,59 @@ class CpioWrite:
"""cpio archive small files in memory, using new style portable header format"""
def __init__(self):
self.cpio = ''
self.cpio = bytearray()
def add(self, name=None, content=None, perms=0x1a4, type=0x8000):
namesize = len(name) + 1
if namesize % 2:
name += '\0'
name += b'\0'
filesize = len(content)
mode = perms | type
c = []
c.append('070701') # magic
c.append('%08X' % 0) # inode
c.append('%08X' % mode) # mode
c.append('%08X' % 0) # uid
c.append('%08X' % 0) # gid
c.append('%08X' % 0) # nlink
c.append('%08X' % 0) # mtime
c.append('%08X' % filesize)
c.append('%08X' % 0) # major
c.append('%08X' % 0) # minor
c.append('%08X' % 0) # rmajor
c.append('%08X' % 0) # rminor
c.append('%08X' % namesize)
c.append('%08X' % 0) # checksum
c = bytearray()
c.extend(b'070701') # magic
c.extend(b'%08X' % 0) # inode
c.extend(b'%08X' % mode) # mode
c.extend(b'%08X' % 0) # uid
c.extend(b'%08X' % 0) # gid
c.extend(b'%08X' % 0) # nlink
c.extend(b'%08X' % 0) # mtime
c.extend(b'%08X' % filesize)
c.extend(b'%08X' % 0) # major
c.extend(b'%08X' % 0) # minor
c.extend(b'%08X' % 0) # rmajor
c.extend(b'%08X' % 0) # rminor
c.extend(b'%08X' % namesize)
c.extend(b'%08X' % 0) # checksum
c.append(name + '\0')
c.append('\0' * (len(''.join(c)) % 4))
c.extend(name + b'\0')
c.extend(b'\0' * (len(c) % 4))
c.append(content)
c.extend(content)
c = ''.join(c)
if len(c) % 4:
c += '\0' * (4 - len(c) % 4)
c.extend(b'\0' * (4 - len(c) % 4))
self.cpio += c
self.cpio.extend(c)
def add_padding(self):
if len(self.cpio) % 512:
self.cpio += '\0' * (512 - len(self.cpio) % 512)
self.cpio.extend(b'\0' * (512 - len(self.cpio) % 512))
def get(self):
self.add('TRAILER!!!', '')
self.add(b'TRAILER!!!', b'')
self.add_padding()
return ''.join(self.cpio)
return bytes(self.cpio)
if __name__ == '__main__':
if len(sys.argv) != 2:
print('usage: %s /path/to/file.cpio' % sys.argv[0])
sys.exit(1)
# a potential user might want to pass a bytes instead of a str
# to make sure that the CpioError's file attribute is always a
# bytes
cpio = CpioRead(sys.argv[1])
cpio.read()
for hdr in cpio:
print(hdr)
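
A minimal sketch of the bytes-based CpioWrite above; file name and content are made up:

from osc.util.cpio import CpioWrite

archive = CpioWrite()
archive.add(b'hello.txt', b'hello world\n')   # name and content must be bytes
data = archive.get()                          # bytes, newc format, 512-byte padded
with open('hello.cpio', 'wb') as f:
    f.write(data)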

(next changed file)

@ -5,8 +5,10 @@ from . import ar
import os.path
import re
import tarfile
import StringIO
from io import BytesIO
from . import packagequery
import itertools
HAVE_LZMA = True
try:
@ -14,13 +16,21 @@ try:
except ImportError:
HAVE_LZMA = False
if (not hasattr(itertools, 'zip_longest')
and hasattr(itertools, 'izip_longest')):
# python2 case
itertools.zip_longest = itertools.izip_longest
class DebError(packagequery.PackageError):
pass
class DebQuery(packagequery.PackageQuery, packagequery.PackageQueryResult):
default_tags = ('package', 'version', 'release', 'epoch', 'architecture', 'description',
'provides', 'depends', 'pre_depends', 'conflicts', 'breaks')
default_tags = (b'package', b'version', b'release', b'epoch',
b'architecture', b'description', b'provides', b'depends',
b'pre_depends', b'conflicts', b'breaks')
def __init__(self, fh):
self.__file = fh
@ -31,24 +41,24 @@ class DebQuery(packagequery.PackageQuery, packagequery.PackageQueryResult):
def read(self, all_tags=False, self_provides=True, *extra_tags):
arfile = ar.Ar(fh = self.__file)
arfile.read()
debbin = arfile.get_file('debian-binary')
debbin = arfile.get_file(b'debian-binary')
if debbin is None:
raise DebError(self.__path, 'no debian binary')
if debbin.read() != '2.0\n':
if debbin.read() != b'2.0\n':
raise DebError(self.__path, 'invalid debian binary format')
control = arfile.get_file('control.tar.gz')
control = arfile.get_file(b'control.tar.gz')
if control is not None:
# XXX: python2.4 relies on a name
tar = tarfile.open(name='control.tar.gz', fileobj=control)
else:
control = arfile.get_file('control.tar.xz')
control = arfile.get_file(b'control.tar.xz')
if control is None:
raise DebError(self.__path, 'missing control.tar')
if not HAVE_LZMA:
raise DebError(self.__path, 'can\'t open control.tar.xz without python-lzma')
decompressed = lzma.decompress(control.read())
tar = tarfile.open(name="control.tar.xz",
fileobj=StringIO.StringIO(decompressed))
fileobj=BytesIO(decompressed))
try:
name = './control'
# workaround for python2.4's tarfile module
@ -64,94 +74,98 @@ class DebQuery(packagequery.PackageQuery, packagequery.PackageQueryResult):
def __parse_control(self, control, all_tags=False, self_provides=True, *extra_tags):
data = control.readline().strip()
while data:
field, val = re.split(':\s*', data.strip(), 1)
field, val = re.split(b':\s*', data.strip(), 1)
data = control.readline()
while data and re.match('\s+', data):
val += '\n' + data.strip()
while data and re.match(b'\s+', data):
val += b'\n' + data.strip()
data = control.readline().rstrip()
field = field.replace('-', '_').lower()
field = field.replace(b'-', b'_').lower()
if field in self.default_tags + extra_tags or all_tags:
# a hyphen is not allowed in dict keys
self.fields[field] = val
versrel = self.fields['version'].rsplit('-', 1)
versrel = self.fields[b'version'].rsplit(b'-', 1)
if len(versrel) == 2:
self.fields['version'] = versrel[0]
self.fields['release'] = versrel[1]
self.fields[b'version'] = versrel[0]
self.fields[b'release'] = versrel[1]
else:
self.fields['release'] = None
verep = self.fields['version'].split(':', 1)
self.fields[b'release'] = None
verep = self.fields[b'version'].split(b':', 1)
if len(verep) == 2:
self.fields['epoch'] = verep[0]
self.fields['version'] = verep[1]
self.fields[b'epoch'] = verep[0]
self.fields[b'version'] = verep[1]
else:
self.fields['epoch'] = '0'
self.fields['provides'] = [ i.strip() for i in re.split(',\s*', self.fields.get('provides', '')) if i ]
self.fields['depends'] = [ i.strip() for i in re.split(',\s*', self.fields.get('depends', '')) if i ]
self.fields['pre_depends'] = [ i.strip() for i in re.split(',\s*', self.fields.get('pre_depends', '')) if i ]
self.fields['conflicts'] = [ i.strip() for i in re.split(',\s*', self.fields.get('conflicts', '')) if i ]
self.fields['breaks'] = [ i.strip() for i in re.split(',\s*', self.fields.get('breaks', '')) if i ]
self.fields['recommends'] = [ i.strip() for i in re.split(',\s*', self.fields.get('recommends', '')) if i ]
self.fields['suggests'] = [ i.strip() for i in re.split(',\s*', self.fields.get('suggests', '')) if i ]
self.fields['enhances'] = [ i.strip() for i in re.split(',\s*', self.fields.get('enhances', '')) if i ]
self.fields[b'epoch'] = b'0'
self.fields[b'provides'] = self._split_field_value(b'provides')
self.fields[b'depends'] = self._split_field_value(b'depends')
self.fields[b'pre_depends'] = self._split_field_value(b'pre_depends')
self.fields[b'conflicts'] = self._split_field_value(b'conflicts')
self.fields[b'breaks'] = self._split_field_value(b'breaks')
self.fields[b'recommends'] = self._split_field_value(b'recommends')
self.fields[b'suggests'] = self._split_field_value(b'suggests')
self.fields[b'enhances'] = self._split_field_value(b'enhances')
if self_provides:
# add self provides entry
self.fields['provides'].append('%s (= %s)' % (self.name(), '-'.join(versrel)))
self.fields[b'provides'].append(b'%s (= %s)' % (self.name(), b'-'.join(versrel)))
def _split_field_value(self, field, delimeter=b',\s*'):
return [i.strip()
for i in re.split(delimeter, self.fields.get(field, b'')) if i]
def vercmp(self, debq):
res = cmp(int(self.epoch()), int(debq.epoch()))
res = packagequery.cmp(int(self.epoch()), int(debq.epoch()))
if res != 0:
return res
res = DebQuery.debvercmp(self.version(), debq.version())
if res != None:
if res != 0:
return res
res = DebQuery.debvercmp(self.release(), debq.release())
return res
def name(self):
return self.fields['package']
return self.fields[b'package']
def version(self):
return self.fields['version']
return self.fields[b'version']
def release(self):
return self.fields['release']
return self.fields[b'release']
def epoch(self):
return self.fields['epoch']
return self.fields[b'epoch']
def arch(self):
return self.fields['architecture']
return self.fields[b'architecture']
def description(self):
return self.fields['description']
return self.fields[b'description']
def path(self):
return self.__path
def provides(self):
return self.fields['provides']
return self.fields[b'provides']
def requires(self):
return self.fields['depends'] + self.fields['pre_depends']
return self.fields[b'depends'] + self.fields[b'pre_depends']
def conflicts(self):
return self.fields['conflicts'] + self.fields['breaks']
return self.fields[b'conflicts'] + self.fields[b'breaks']
def obsoletes(self):
return []
def recommends(self):
return self.fields['recommends']
return self.fields[b'recommends']
def suggests(self):
return self.fields['suggests']
return self.fields[b'suggests']
def supplements(self):
# a control file has no notion of "supplements"
return []
def enhances(self):
return self.fields['enhances']
return self.fields[b'enhances']
def gettag(self, num):
return self.fields.get(num, None)
@ -174,20 +188,31 @@ class DebQuery(packagequery.PackageQuery, packagequery.PackageQueryResult):
"""
# 32 is arbitrary - it is needed for the "longer digit string wins" handling
# (found this nice approach in Build/Deb.pm (build package))
ver1 = re.sub('(\d+)', lambda m: (32 * '0' + m.group(1))[-32:], ver1)
ver2 = re.sub('(\d+)', lambda m: (32 * '0' + m.group(1))[-32:], ver2)
vers = map(lambda x, y: (x or '', y or ''), ver1, ver2)
ver1 = re.sub(b'(\d+)', lambda m: (32 * b'0' + m.group(1))[-32:], ver1)
ver2 = re.sub(b'(\d+)', lambda m: (32 * b'0' + m.group(1))[-32:], ver2)
vers = itertools.zip_longest(ver1, ver2, fillvalue=b'')
for v1, v2 in vers:
if v1 == v2:
continue
if not v1:
# this makes the corresponding condition in the following
# else part superfluous - keep the superfluous condition for
# now (just to ease a (hopefully) upcoming refactoring (this
# method really deserves a cleanup...))
return -1
if not v2:
# see above
return 1
v1 = bytes(bytearray([v1]))
v2 = bytes(bytearray([v2]))
if (v1.isalpha() and v2.isalpha()) or (v1.isdigit() and v2.isdigit()):
res = cmp(v1, v2)
res = packagequery.cmp(v1, v2)
if res != 0:
return res
else:
if v1 == '~' or not v1:
if v1 == b'~' or not v1:
return -1
elif v2 == '~' or not v2:
elif v2 == b'~' or not v2:
return 1
ord1 = ord(v1)
if not (v1.isalpha() or v1.isdigit()):
@ -204,9 +229,9 @@ class DebQuery(packagequery.PackageQuery, packagequery.PackageQueryResult):
@staticmethod
def filename(name, epoch, version, release, arch):
if release:
return '%s_%s-%s_%s.deb' % (name, version, release, arch)
return b'%s_%s-%s_%s.deb' % (name, version, release, arch)
else:
return '%s_%s_%s.deb' % (name, version, arch)
return b'%s_%s_%s.deb' % (name, version, arch)
if __name__ == '__main__':
import sys
@ -218,6 +243,6 @@ if __name__ == '__main__':
print(debq.name(), debq.version(), debq.release(), debq.arch())
print(debq.description())
print('##########')
print('\n'.join(debq.provides()))
print(b'\n'.join(debq.provides()))
print('##########')
print('\n'.join(debq.requires()))
print(b'\n'.join(debq.requires()))
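
The Debian version comparison now works on bytes throughout. A small sketch, assuming debvercmp remains a static method:

from osc.util.debquery import DebQuery

print(DebQuery.debvercmp(b'1.2', b'1.10'))     # -1: digit runs are zero-padded,
                                               # so they compare numerically
print(DebQuery.debvercmp(b'2.0-1', b'2.0-1'))  # 0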

(next changed file)

@ -60,18 +60,18 @@ class PackageQuery:
f.seek(0)
extra_tags = ()
pkgquery = None
if magic[:4] == '\xed\xab\xee\xdb':
if magic[:4] == b'\xed\xab\xee\xdb':
from . import rpmquery
pkgquery = rpmquery.RpmQuery(f)
extra_tags = extra_rpmtags
elif magic == '!<arch>':
elif magic == b'!<arch>':
from . import debquery
pkgquery = debquery.DebQuery(f)
extra_tags = extra_debtags
elif magic[:5] == '<?xml':
elif magic[:5] == b'<?xml':
f.close()
return None
elif magic[:5] == '\375\067zXZ' or magic[:2] == '\037\213':
elif magic[:5] == b'\375\067zXZ' or magic[:2] == b'\037\213':
from . import archquery
pkgquery = archquery.ArchQuery(f)
else:
@ -159,6 +159,11 @@ class PackageQueryResult:
evr = epoch + ":" + evr
return evr
def cmp(a, b):
return (a > b) - (a < b)
if __name__ == '__main__':
import sys
try:
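
The module-level cmp() helper above stands in for Python 2's builtin cmp() in the *query modules. A minimal sketch:

from osc.util import packagequery

print(packagequery.cmp(1, 2))        # -1
print(packagequery.cmp(b'a', b'a'))  # 0
print(packagequery.cmp(10, 2))       # 1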

(next changed file)

@ -370,16 +370,15 @@ class RpmQuery(packagequery.PackageQuery, packagequery.PackageQueryResult):
def filename(name, epoch, version, release, arch):
return '%s-%s-%s.%s.rpm' % (name, version, release, arch)
def unpack_string(data):
def unpack_string(data, encoding=None):
"""unpack a '\\0' terminated string from data"""
val = ''
for c in data:
c, = struct.unpack('!c', c)
if c == '\0':
break
else:
val += c
return val
idx = data.find(b'\0')
if idx == -1:
raise ValueError('illegal string: not \\0 terminated')
data = data[:idx]
if encoding is not None:
data = data.decode(encoding)
return data
if __name__ == '__main__':
import sys
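
A small sketch of unpack_string() above, assuming it remains a module-level helper in osc.util.rpmquery; the sample buffer is made up:

from osc.util.rpmquery import unpack_string

print(unpack_string(b'noarch\0trailing junk'))                    # b'noarch'
print(unpack_string(b'noarch\0trailing junk', encoding='utf-8'))  # noarch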