Mirror of https://github.com/openSUSE/osc.git (synced 2024-12-26 18:06:13 +01:00)

Merge pull request #1140 from dmach/fix-pylint-errors-and-warnings

Fix pylint errors and warnings

This commit is contained in: ef376af287
@@ -26,6 +26,7 @@ class ConfigLineOrder:
 It keeps track of all lines (including comments) in the _lines list. This list
 either contains SectionLine() instances or CommentLine() instances.
 """
+
 def __init__(self):
 self._lines = []
 
@@ -56,7 +57,7 @@ class ConfigLineOrder:
 self._append(CommentLine(line))
 
 def keys(self):
-return [ i.name for i in self._lines if i.type == 'section' ]
+return [i.name for i in self._lines if i.type == 'section']
 
 def __setitem__(self, key, value):
 section = SectionLine(key)
@@ -75,27 +76,30 @@ class ConfigLineOrder:
 self._lines.remove(line)
 
 def __iter__(self):
-#return self._lines.__iter__()
+# return self._lines.__iter__()
 for line in self._lines:
 if line.type == 'section':
 yield line.name
 
+
 class Line:
 """Base class for all line objects"""
+
 def __init__(self, name, type):
 self.name = name
 self.type = type
 
+
 class SectionLine(Line):
 """
 This class represents a [section]. It stores all lines which belongs to
 this certain section in the _lines list. The _lines list either contains
 CommentLine() or OptionLine() instances.
 """
-def __init__(self, sectname, dict = {}):
-Line.__init__(self, sectname, 'section')
+
+def __init__(self, sectname):
+super().__init__(sectname, 'section')
 self._lines = []
-self._dict = dict
 
 def _find(self, name):
 for line in self._lines:
@@ -103,7 +107,7 @@ class SectionLine(Line):
 return line
 return None
 
-def _add_option(self, optname, value = None, line = None, sep = '='):
+def _add_option(self, optname, value=None, line=None, sep='='):
 if value is None and line is None:
 raise configparser.Error('Either value or line must be passed in')
 elif value and line:
@@ -124,10 +128,10 @@ class SectionLine(Line):
 return dict(self.items())
 
 def items(self):
-return [ (i.name, i.value) for i in self._lines if i.type == 'option' ]
+return [(i.name, i.value) for i in self._lines if i.type == 'option']
 
 def keys(self):
-return [ i.name for i in self._lines ]
+return [i.name for i in self._lines]
 
 def __setitem__(self, key, val):
 self._add_option(key, val)
@@ -155,12 +159,14 @@ class SectionLine(Line):
 
 class CommentLine(Line):
 """Store a commentline"""
+
 def __init__(self, line):
-Line.__init__(self, line.strip('\n'), 'comment')
+super().__init__(line.strip('\n'), 'comment')
 
 def __str__(self):
 return self.name
 
+
 class OptionLine(Line):
 """
 This class represents an option. The class' ``name`` attribute is used
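The hunks above swap explicit base-class calls such as Line.__init__(self, ...) for super().__init__(...). A minimal sketch of the two spellings, using made-up classes rather than osc code; both behave the same in this simple case, but super() avoids hard-coding the parent and cooperates with multiple inheritance:

    class Base:
        def __init__(self, name):
            self.name = name


    class OldStyle(Base):
        def __init__(self, name):
            # explicit base-class call: repeats the class name and passes self by hand
            Base.__init__(self, name)


    class NewStyle(Base):
        def __init__(self, name):
            # super() resolves the parent via the MRO and binds self automatically
            super().__init__(name)


    assert OldStyle('x').name == NewStyle('x').name == 'x'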
@@ -177,7 +183,7 @@ class OptionLine(Line):
 """
 
 def __init__(self, optname, line):
-Line.__init__(self, optname, 'option')
+super().__init__(optname, 'option')
 self.name = optname
 self.format(line)
 
@@ -203,8 +209,9 @@ class OscConfigParser(configparser.ConfigParser):
 In order to keep the order and the format it makes use of the ConfigLineOrder()
 class.
 """
-def __init__(self, defaults={}):
-super().__init__(defaults)
+
+def __init__(self, defaults=None):
+super().__init__(defaults or {})
 self._sections = ConfigLineOrder()
 
 # XXX: unfortunately we have to override the _read() method from the ConfigParser()
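The OscConfigParser change above is one instance of a pattern applied throughout this pull request (later also to localpkgs and keys): a mutable default argument is replaced by a None default that is resolved inside the function. A minimal sketch with hypothetical names showing why the old form is flagged:

    def collect_bad(item, bucket=[]):
        # the default list is created once and shared by every call
        bucket.append(item)
        return bucket


    def collect_good(item, bucket=None):
        # mirrors the "localpkgs = localpkgs or []" idiom used in this diff
        bucket = bucket or []
        bucket.append(item)
        return bucket


    assert collect_bad(1) == [1]
    assert collect_bad(2) == [1, 2]    # state leaked from the first call
    assert collect_good(1) == [1]
    assert collect_good(2) == [2]      # each call gets a fresh list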
@@ -280,7 +287,7 @@ class OscConfigParser(configparser.ConfigParser):
 # ';' is a comment delimiter only if it follows
 # a spacing character
 pos = optval.find(';')
-if pos != -1 and optval[pos-1].isspace():
+if pos != -1 and optval[pos - 1].isspace():
 optval = optval[:pos]
 optval = optval.strip()
 # allow empty values
@@ -301,9 +308,9 @@ class OscConfigParser(configparser.ConfigParser):
 e.append(lineno, repr(line))
 # if any parsing errors occurred, raise an exception
 if e:
 raise e  # pylint: disable-msg=E0702
 
-def write(self, fp, comments = False):
+def write(self, fp, comments=False):
 """
 write the configuration file. If comments is True all comments etc.
 will be written to fp otherwise the ConfigParsers' default write method
@@ -111,7 +111,7 @@ def run(prg, argv=None):
 if b'<summary>' in body:
 msg = body.split(b'<summary>')[1]
 msg = msg.split(b'</summary>')[0]
-msg = msg.replace(b'&lt;', b'<').replace(b'&gt;' , b'>').replace(b'&amp;', b'&')
+msg = msg.replace(b'&lt;', b'<').replace(b'&gt;', b'>').replace(b'&amp;', b'&')
 print(decode_it(msg), file=sys.stderr)
 if e.code >= 500 and e.code <= 599:
 print('\nRequest: %s' % e.filename)
@@ -164,9 +164,9 @@ def run(prg, argv=None):
 except (oscerr.PackageExists, oscerr.PackageMissing, oscerr.WorkingCopyInconsistent) as e:
 print(e.msg, file=sys.stderr)
 except oscerr.PackageInternalError as e:
-print('a package internal error occured\n' \
+print('a package internal error occured\n'
-'please file a bug and attach your current package working copy ' \
+'please file a bug and attach your current package working copy '
 'and the following traceback to it:', file=sys.stderr)
 print(e.msg, file=sys.stderr)
 traceback.print_exc(file=sys.stderr)
 except oscerr.PackageError as e:
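The error-message changes above only drop trailing backslashes: inside the parentheses of the print() call, adjacent string literals are concatenated by the parser, so explicit line continuations are redundant. A small sketch reusing the message from the hunk (wording, including the "occured" spelling, quoted as-is):

    # adjacent string literals inside parentheses are joined at compile time
    message = ('a package internal error occured\n'
               'please file a bug and attach your current package working copy '
               'and the following traceback to it:')
    assert '\n' in message and message.endswith('to it:')
    print(message)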
osc/build.py (268 changed lines)

@@ -3,6 +3,7 @@
 # and distributed under the terms of the GNU General Public Licence,
 # either version 2, or (at your option) any later version.
 
+import glob
 import os
 import re
 import shutil
@@ -22,54 +23,58 @@ from .core import get_buildinfo, store_read_project, store_read_package, meta_ex
 from .core import get_binarylist, get_binary_file, run_external, return_external, raw_input
 from .fetch import Fetcher, OscFileGrabber, verify_pacs
 from .meter import create_text_meter
-from .util import rpmquery, debquery, archquery
+from .util import cpio
+from .util import archquery, debquery, packagequery, rpmquery
+from .util import repodata
 from .util.helper import decode_it
 
 
 change_personality = {
 'i686': 'linux32',
 'i586': 'linux32',
 'i386': 'linux32',
 'ppc': 'powerpc32',
 's390': 's390',
 'sparc': 'linux32',
 'sparcv8': 'linux32',
 }
 
 can_also_build = {
 'aarch64': ['aarch64'], # only needed due to used heuristics in build parameter evaluation
-'armv6l': [ 'armv4l', 'armv5l', 'armv6l', 'armv5el', 'armv6el' ],
+'armv6l': ['armv4l', 'armv5l', 'armv6l', 'armv5el', 'armv6el'],
-'armv7l': [ 'armv4l', 'armv5l', 'armv6l', 'armv7l', 'armv5el', 'armv6el', 'armv7el' ],
+'armv7l': ['armv4l', 'armv5l', 'armv6l', 'armv7l', 'armv5el', 'armv6el', 'armv7el'],
-'armv5el': [ 'armv4l', 'armv5l', 'armv5el' ], # not existing arch, just for compatibility
+'armv5el': ['armv4l', 'armv5l', 'armv5el'], # not existing arch, just for compatibility
-'armv6el': [ 'armv4l', 'armv5l', 'armv6l', 'armv5el', 'armv6el' ], # not existing arch, just for compatibility
+'armv6el': ['armv4l', 'armv5l', 'armv6l', 'armv5el', 'armv6el'], # not existing arch, just for compatibility
-'armv6hl': [ 'armv4l', 'armv5l', 'armv6l', 'armv5el', 'armv6el' ],
+'armv6hl': ['armv4l', 'armv5l', 'armv6l', 'armv5el', 'armv6el'],
-'armv7el': [ 'armv4l', 'armv5l', 'armv6l', 'armv7l', 'armv5el', 'armv6el', 'armv7el' ], # not existing arch, just for compatibility
+'armv7el': ['armv4l', 'armv5l', 'armv6l', 'armv7l', 'armv5el', 'armv6el', 'armv7el'], # not existing arch, just for compatibility
-'armv7hl': [ 'armv7hl' ], # not existing arch, just for compatibility
+'armv7hl': ['armv7hl'], # not existing arch, just for compatibility
-'armv8el': [ 'armv4l', 'armv5el', 'armv6el', 'armv7el', 'armv8el' ], # not existing arch, just for compatibility
+'armv8el': ['armv4l', 'armv5el', 'armv6el', 'armv7el', 'armv8el'], # not existing arch, just for compatibility
-'armv8l': [ 'armv4l', 'armv5el', 'armv6el', 'armv7el', 'armv8el' ], # not existing arch, just for compatibility
+'armv8l': ['armv4l', 'armv5el', 'armv6el', 'armv7el', 'armv8el'], # not existing arch, just for compatibility
-'armv5tel': [ 'armv4l', 'armv5el', 'armv5tel' ],
+'armv5tel': ['armv4l', 'armv5el', 'armv5tel'],
-'s390x': ['s390' ],
+'s390x': ['s390'],
-'ppc64': [ 'ppc', 'ppc64', 'ppc64p7', 'ppc64le' ],
+'ppc64': ['ppc', 'ppc64', 'ppc64p7', 'ppc64le'],
-'ppc64le': [ 'ppc64le', 'ppc64' ],
+'ppc64le': ['ppc64le', 'ppc64'],
-'i586': [ 'i386' ],
+'i586': ['i386'],
-'i686': [ 'i586', 'i386' ],
+'i686': ['i586', 'i386'],
-'x86_64': ['i686', 'i586', 'i386' ],
+'x86_64': ['i686', 'i586', 'i386'],
 'sparc64': ['sparc64v', 'sparcv9v', 'sparcv9', 'sparcv8', 'sparc'],
 'parisc': ['hppa'],
 }
 
 # real arch of this machine
 hostarch = os.uname()[4]
 if hostarch == 'i686': # FIXME
 hostarch = 'i586'
 
 if hostarch == 'parisc':
 hostarch = 'hppa'
 
 
 class Buildinfo:
 """represent the contents of a buildinfo file"""
-def __init__(self, filename, apiurl, buildtype = 'spec', localpkgs = [], binarytype = 'rpm'):
+
+def __init__(self, filename, apiurl, buildtype='spec', localpkgs=None, binarytype='rpm'):
+localpkgs = localpkgs or []
 try:
 tree = ET.parse(filename)
 except:
@@ -81,7 +86,7 @@ class Buildinfo:
 
 self.apiurl = apiurl
 
-if root.find('error') != None:
+if root.find('error') is not None:
 sys.stderr.write('buildinfo is broken... it says:\n')
 error = root.find('error').text
 if error.startswith('unresolvable: '):
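The `!= None` to `is not None` rewrites above are more than style when ElementTree is involved: root.find() returns either an Element or None, and an Element with no children is falsy, so an identity comparison against None is the reliable presence test. A self-contained sketch with example XML (not a real buildinfo document):

    import xml.etree.ElementTree as ET

    root = ET.fromstring('<buildinfo><error>unresolvable: nothing provides foo</error></buildinfo>')

    error = root.find('error')
    if error is not None:          # "if error:" would be misleading for childless elements
        print(error.text)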
@@ -118,16 +123,16 @@ class Buildinfo:
 # hostarch: The architecture of the build environment (build arch in GNU defintion)
 # crossarch: Same as hostarch, but indicating that a sysroot with an incompatible architecture exists
 self.buildarch = root.find('arch').text
-if root.find('crossarch') != None:
+if root.find('crossarch') is not None:
 self.crossarch = root.find('crossarch').text
 else:
 self.crossarch = None
-if root.find('hostarch') != None:
+if root.find('hostarch') is not None:
 self.hostarch = root.find('hostarch').text
 else:
 self.hostarch = None
 
-if root.find('release') != None:
+if root.find('release') is not None:
 self.release = root.find('release').text
 else:
 self.release = None
@@ -141,7 +146,7 @@ class Buildinfo:
 self.downloadurl = root.get('downloadurl')
 
 self.debuginfo = 0
-if root.find('debuginfo') != None:
+if root.find('debuginfo') is not None:
 try:
 self.debuginfo = int(root.find('debuginfo').text)
 except ValueError:
@@ -162,8 +167,8 @@ class Buildinfo:
 apiurl, localpkgs)
 else:
 pac_arch = self.crossarch
-if pac_arch == None:
+if pac_arch is None:
 pac_arch = self.buildarch
 p = Pac(node, pac_arch, self.pacsuffix,
 apiurl, localpkgs)
 if p.project:
@@ -172,27 +177,26 @@ class Buildinfo:
 for node in root.findall('path'):
 # old simple list for compatibility
 # XXX: really old? This is currently used for kiwi builds
-self.pathes.append(node.get('project')+"/"+node.get('repository'))
+self.pathes.append(node.get('project') + "/" + node.get('repository'))
 # a hash providing the matching URL for specific repos for newer OBS instances
 if node.get('url'):
 baseurl = node.get('url').replace('%', '%%')
 if config['api_host_options'][apiurl]['downloadurl']:
 # Add the path element to the download url override.
 baseurl = config['api_host_options'][apiurl]['downloadurl'] + urlsplit(node.get('url'))[2]
-self.urls[node.get('project')+"/"+node.get('repository')] = baseurl + '/%(arch)s/%(filename)s'
+self.urls[node.get('project') + "/" + node.get('repository')] = baseurl + '/%(arch)s/%(filename)s'
 
-self.vminstall_list = [ dep.name for dep in self.deps if dep.vminstall ]
+self.vminstall_list = [dep.name for dep in self.deps if dep.vminstall]
-self.preinstall_list = [ dep.name for dep in self.deps if dep.preinstall ]
+self.preinstall_list = [dep.name for dep in self.deps if dep.preinstall]
-self.runscripts_list = [ dep.name for dep in self.deps if dep.runscripts ]
+self.runscripts_list = [dep.name for dep in self.deps if dep.runscripts]
-self.noinstall_list = [ dep.name for dep in self.deps if dep.noinstall ]
+self.noinstall_list = [dep.name for dep in self.deps if dep.noinstall]
-self.installonly_list = [ dep.name for dep in self.deps if dep.installonly ]
+self.installonly_list = [dep.name for dep in self.deps if dep.installonly]
 
-if root.find('preinstallimage') != None:
+if root.find('preinstallimage') is not None:
 self.preinstallimage = root.find('preinstallimage')
 else:
 self.preinstallimage = None
 
-
 def has_dep(self, name):
 for i in self.deps:
 if i.name == name:
@@ -211,15 +215,16 @@ class Pac:
 
 We build a map that's later used to fill our URL templates
 """
-def __init__(self, node, buildarch, pacsuffix, apiurl, localpkgs = []):
 
+def __init__(self, node, buildarch, pacsuffix, apiurl, localpkgs=None):
+localpkgs = localpkgs or []
 self.mp = {}
 for i in ['binary', 'package',
 'epoch', 'version', 'release', 'hdrmd5',
 'project', 'repository', 'sysroot',
 'preinstall', 'vminstall', 'runscripts',
 'noinstall', 'installonly', 'notmeta',
 ]:
 self.mp[i] = node.get(i)
 
 self.mp['buildarch'] = buildarch
@@ -229,10 +234,10 @@ class Pac:
 self.mp['name'] = node.get('name') or self.mp['binary']
 
 # this is not the ideal place to check if the package is a localdep or not
 localdep = self.mp['name'] in localpkgs  # and not self.mp['noinstall']
 if not localdep and not (node.get('project') and node.get('repository')):
 raise oscerr.APIError('incomplete information for package %s, may be caused by a broken project configuration.'
-% self.mp['name'] )
+% self.mp['name'])
 
 if not localdep:
 self.mp['extproject'] = node.get('project').replace(':', ':/')
@@ -243,7 +248,7 @@ class Pac:
 if pacsuffix == 'deb' and not (self.mp['name'] and self.mp['arch'] and self.mp['version']):
 raise oscerr.APIError(
 "buildinfo for package %s/%s/%s is incomplete"
 % (self.mp['name'], self.mp['arch'], self.mp['version']))
 
 self.mp['apiurl'] = apiurl
 
@@ -278,7 +283,6 @@ class Pac:
 # make the content of the dictionary accessible as class attributes
 self.__dict__.update(self.mp)
 
-
 def makeurls(self, cachedir, urllist):
 self.localdir = '%s/%s/%s/%s' % (cachedir, self.project, self.repository, self.repoarch)
 self.fullfilename = os.path.join(self.localdir, self.canonname)
@@ -347,6 +351,7 @@ def get_preinstall_image(apiurl, arch, cache_dir, img_info, offline=False):
 os.rename(ifile_path_part, ifile_path)
 return (imagefile, imagesource, img_bins)
 
+
 def get_built_files(pacdir, buildtype):
 if buildtype == 'spec':
 debs_dir = os.path.join(pacdir, 'DEBS')
@@ -438,6 +443,7 @@ def get_built_files(pacdir, buildtype):
 s_built = ''
 return s_built, b_built
 
+
 def get_repo(path):
 """Walks up path looking for any repodata directories.
 
@@ -464,9 +470,8 @@ def get_repo(path):
 
 return repositoryDirectory
 
+
 def get_prefer_pkgs(dirs, wanted_arch, type, cpio):
-import glob
-from .util import repodata, packagequery
 paths = []
 repositories = []
 
@@ -501,7 +506,7 @@ def get_prefer_pkgs(dirs, wanted_arch, type, cpio):
 packageQueries.add(packageQuery)
 
 prefer_pkgs = {decode_it(name): packageQuery.path()
 for name, packageQuery in packageQueries.items()}
 
 depfile = create_deps(packageQueries.values())
 cpio.add(b'deps', b'\n'.join(depfile))
@@ -539,14 +544,16 @@ trustprompt = """Would you like to ...
 1 - always trust packages from '%(project)s'
 2 - trust packages just this time
 ? """
+
+
 def check_trusted_projects(apiurl, projects):
 trusted = config['api_host_options'][apiurl]['trusted_prj']
 tlen = len(trusted)
 for prj in projects:
-if not prj in trusted:
+if prj not in trusted:
 print("\nThe build root needs packages from project '%s'." % prj)
 print("Note that malicious packages can compromise the build result or even your system.")
-r = raw_input(trustprompt % { 'project': prj })
+r = raw_input(trustprompt % {'project': prj})
 if r == '1':
 print("adding '%s' to oscrc: ['%s']['trusted_prj']" % (prj, apiurl))
 trusted.append(prj)
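As an aside on the prompt above: the %(project)s placeholder in trustprompt is filled by %-formatting with a mapping, which is what the raw_input(trustprompt % {'project': prj}) call does. A minimal sketch with an illustrative project name (the value is an example, not taken from the diff):

    trustprompt = """Would you like to ...
    1 - always trust packages from '%(project)s'
    2 - trust packages just this time
    ? """

    # %(name)s placeholders are looked up by key in the mapping on the right-hand side
    print(trustprompt % {'project': 'openSUSE:Factory'})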
@@ -558,6 +565,7 @@ def check_trusted_projects(apiurl, projects):
 config['api_host_options'][apiurl]['trusted_prj'] = trusted
 conf.config_set_option(apiurl, 'trusted_prj', ' '.join(trusted))
 
+
 def get_kiwipath_from_buildinfo(bi, prj, repo):
 # If the project does not have a path defined we need to get the config
 # via the repositories in the kiwi file. Unfortunately the buildinfo
@@ -573,6 +581,7 @@ def get_kiwipath_from_buildinfo(bi, prj, repo):
 kiwipath.insert(0, myprp)
 return kiwipath
 
+
 def calculate_prj_pac(opts, descr):
 project = opts.alternative_project or store_read_project('.')
 if opts.local_package:
@@ -581,16 +590,19 @@ def calculate_prj_pac(opts, descr):
 package = store_read_package('.')
 return project, package
 
+
 def calculate_build_root(apihost, prj, pac, repo, arch):
 buildroot = os.environ.get('OSC_BUILD_ROOT', config['build-root']) \
 % {'repo': repo, 'arch': arch, 'project': prj, 'package': pac, 'apihost': apihost}
 return buildroot
 
+
 def build_as_user():
 if os.environ.get('OSC_SU_WRAPPER', config['su-wrapper']).split():
 return False
 return True
 
+
 def su_wrapper(cmd):
 sucmd = os.environ.get('OSC_SU_WRAPPER', config['su-wrapper']).split()
 if sucmd:
@@ -602,6 +614,7 @@ def su_wrapper(cmd):
 cmd = sucmd + cmd
 return cmd
 
+
 def run_build(opts, *args):
 cmd = [config['build-cmd']]
 cmd += args
@@ -612,6 +625,7 @@ def run_build(opts, *args):
 cmd.append('--norootforbuild')
 return run_external(cmd[0], *cmd[1:])
 
+
 def main(apiurl, opts, argv):
 
 repo = argv[0]
@@ -758,7 +772,8 @@ def main(apiurl, opts, argv):
 for var in ['OSC_PACKAGECACHEDIR', 'OSC_SU_WRAPPER', 'OSC_BUILD_ROOT']:
 val = os.getenv(var)
 if val:
-if var.startswith('OSC_'): var = var[4:]
+if var.startswith('OSC_'):
+var = var[4:]
 var = var.lower().replace('_', '-')
 if var in config:
 print('Overriding config value for %s=\'%s\' with \'%s\'' % (var, config[var], val))
@@ -781,7 +796,7 @@ def main(apiurl, opts, argv):
 build_root = config['api_host_options'][apiurl].get('build-root', build_root)
 try:
 build_root = build_root % {'repo': repo, 'arch': arch,
 'project': prj, 'package': pacname, 'apihost': apihost}
 except:
 pass
 
@@ -887,13 +902,11 @@ def main(apiurl, opts, argv):
 else:
 print('Using local buildenv file: %s' % os.path.basename(buildenvfile))
 if buildenvfile or servicefile:
-from .util import cpio
 if not cpiodata:
 cpiodata = cpio.CpioWrite()
 
 if opts.prefer_pkgs:
 print('Scanning the following dirs for local packages: %s' % ', '.join(opts.prefer_pkgs))
-from .util import cpio
 if not cpiodata:
 cpiodata = cpio.CpioWrite()
 prefer_pkgs = get_prefer_pkgs(opts.prefer_pkgs, arch, build_type, cpiodata)
@@ -919,12 +932,12 @@ def main(apiurl, opts, argv):
 myrsyncdest = os.path.expandvars(opts.rsyncdest)
 if not os.path.isabs(myrsyncdest):
 raise oscerr.WrongOptions('--rsync-dest %s is no absolute path (starting with \'/\')!' % opts.rsyncdest)
-specialcmdopts = ['--rsync-src='+myrsyncsrc, '--rsync-dest='+myrsyncdest]
+specialcmdopts = ['--rsync-src=' + myrsyncsrc, '--rsync-dest=' + myrsyncdest]
 if opts.overlay:
 myoverlay = os.path.abspath(os.path.expanduser(os.path.expandvars(opts.overlay)))
 if not os.path.isdir(myoverlay):
 raise oscerr.WrongOptions('--overlay %s is no valid directory!' % opts.overlay)
-specialcmdopts += ['--overlay='+myoverlay]
+specialcmdopts += ['--overlay=' + myoverlay]
 
 try:
 if opts.noinit:
@@ -954,12 +967,12 @@ def main(apiurl, opts, argv):
 
 print('Getting buildinfo from server and store to %s' % bi_filename)
 bi_text = decode_it(get_buildinfo(apiurl,
 prj,
 pac,
 repo,
 arch,
 specfile=build_descr_data,
 addlist=extra_pkgs))
 if not bi_file:
 bi_file = open(bi_filename, 'w')
 # maybe we should check for errors before saving the file
@@ -982,19 +995,19 @@ def main(apiurl, opts, argv):
 try:
 # take care, not to run into double trouble.
 pkg_meta_e = meta_exists(metatype='pkg', path_args=(quote_plus(prj),
 quote_plus(pac)), template_args=None, create_new=False,
 apiurl=apiurl)
 except:
 pass
 
 if pkg_meta_e:
 print('ERROR: Either wrong repo/arch as parameter or a parse error of .spec/.dsc/.kiwi file due to syntax error', file=sys.stderr)
 else:
-print('The package \'%s\' does not exist - please ' \
+print('The package \'%s\' does not exist - please '
 'rerun with \'--local-package\'' % pac, file=sys.stderr)
 else:
-print('The project \'%s\' does not exist - please ' \
+print('The project \'%s\' does not exist - please '
 'rerun with \'--alternative-project <alternative_project>\'' % prj, file=sys.stderr)
 sys.exit(1)
 else:
 raise
@@ -1030,12 +1043,12 @@ def main(apiurl, opts, argv):
 # vs.
 # arch we are supposed to build for
 if vm_type != "emulator" and vm_type != "qemu":
-if bi.hostarch != None:
+if bi.hostarch is not None:
-if hostarch != bi.hostarch and not bi.hostarch in can_also_build.get(hostarch, []):
+if hostarch != bi.hostarch and bi.hostarch not in can_also_build.get(hostarch, []):
 print('Error: hostarch \'%s\' is required.' % (bi.hostarch), file=sys.stderr)
 return 1
 elif hostarch != bi.buildarch:
-if not bi.buildarch in can_also_build.get(hostarch, []):
+if bi.buildarch not in can_also_build.get(hostarch, []):
 print('WARNING: It is guessed to build on hostarch \'%s\' for \'%s\' via QEMU user emulation.' % (hostarch, bi.buildarch), file=sys.stderr)
 
 rpmlist_prefers = []
@@ -1060,7 +1073,7 @@ def main(apiurl, opts, argv):
 if 'urllist' in config:
 if isinstance(config['urllist'], str):
 re_clist = re.compile('[, ]+')
-urllist = [ i.strip() for i in re_clist.split(config['urllist'].strip()) ]
+urllist = [i.strip() for i in re_clist.split(config['urllist'].strip())]
 else:
 urllist = config['urllist']
 
@@ -1070,21 +1083,20 @@ def main(apiurl, opts, argv):
 if bi.downloadurl:
 urllist.append(bi.downloadurl.replace('%', '%%') + '/%(extproject)s/%(extrepository)s/%(arch)s/%(filename)s')
 if opts.disable_cpio_bulk_download:
-urllist.append( '%(apiurl)s/build/%(project)s/%(repository)s/%(repoarch)s/%(repopackage)s/%(repofilename)s' )
+urllist.append('%(apiurl)s/build/%(project)s/%(repository)s/%(repoarch)s/%(repopackage)s/%(repofilename)s')
 
 fetcher = Fetcher(cache_dir,
-urllist = urllist,
-api_host_options = config['api_host_options'],
-offline = opts.noinit or opts.offline,
-http_debug = config['http_debug'],
-modules = bi.modules,
+urllist=urllist,
+offline=opts.noinit or opts.offline,
+http_debug=config['http_debug'],
+modules=bi.modules,
 enable_cpio=not opts.disable_cpio_bulk_download and bi.enable_cpio,
 cookiejar=connection.CookieJarAuthHandler(apiurl, os.path.expanduser(conf.config["cookiejar"]))._cookiejar,
 download_api_only=opts.download_api_only)
 
 if not opts.trust_all_projects:
 # implicitly trust the project we are building for
-check_trusted_projects(apiurl, [ i for i in bi.projects.keys() if not i == prj ])
+check_trusted_projects(apiurl, [i for i in bi.projects.keys() if not i == prj])
 
 imagefile = ''
 imagesource = ''
@@ -1116,7 +1128,7 @@ def main(apiurl, opts, argv):
 if opts.oldpackages:
 old_pkg_dir = opts.oldpackages
 if not old_pkg_dir.startswith('/') and not opts.offline:
-data = [ prj, pacname, repo, arch]
+data = [prj, pacname, repo, arch]
 if old_pkg_dir == '_link':
 p = core.findpacs(os.curdir)[0]
 if not p.islink():
@@ -1144,15 +1156,20 @@ def main(apiurl, opts, argv):
 if binaries:
 class mytmpdir:
 """ temporary directory that removes itself"""
+
 def __init__(self, *args, **kwargs):
 self.name = mkdtemp(*args, **kwargs)
 _rmtree = staticmethod(shutil.rmtree)
+
 def cleanup(self):
 self._rmtree(self.name)
+
 def __del__(self):
 self.cleanup()
+
 def __exit__(self):
 self.cleanup()
+
 def __str__(self):
 return self.name
 
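The mytmpdir helper above is a hand-rolled self-removing temporary directory. As a side note (not part of this diff), the standard library's tempfile.TemporaryDirectory provides the same behaviour as a context manager; a minimal sketch:

    import os
    import tempfile

    # the directory and everything in it is removed when the with-block exits
    with tempfile.TemporaryDirectory(prefix='oscbuild-') as tmpdir:
        open(os.path.join(tmpdir, 'scratch.txt'), 'w').close()
        print('working in', tmpdir)
    # tmpdir no longer exists here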
@@ -1170,12 +1187,12 @@ def main(apiurl, opts, argv):
 data[0],
 data[2], data[3],
 i.name,
-package = data[1],
+package=data[1],
-target_filename = fname,
+target_filename=fname,
-target_mtime = i.mtime,
+target_mtime=i.mtime,
-progress_meter = True)
+progress_meter=True)
 
-if old_pkg_dir != None:
+if old_pkg_dir is not None:
 buildargs.append('--oldpackages=%s' % old_pkg_dir)
 
 # Make packages from buildinfo available as repos for kiwi/docker/fissile
@@ -1203,22 +1220,22 @@ def main(apiurl, opts, argv):
 filename = sffn.split("/")[-1]
 # project/repo
 if i.name.startswith("container:"):
-prdir = "containers/"+pdir+"/"+rdir
+prdir = "containers/" + pdir + "/" + rdir
 pradir = prdir
 filename = filename[10:]
 if build_type == 'kiwi':
 buildargs.append('--kiwi-parameter')
 buildargs.append('--set-container-derived-from=dir://./' + prdir + "/" + filename)
 else:
-prdir = "repos/"+pdir+"/"+rdir
+prdir = "repos/" + pdir + "/" + rdir
 # project/repo/arch
-pradir = prdir+"/"+adir
+pradir = prdir + "/" + adir
 # target fullfilename
-tffn = pradir+"/"+filename
+tffn = pradir + "/" + filename
 if not os.path.exists(os.path.join(pradir)):
 os.makedirs(os.path.join(pradir))
 if not os.path.exists(tffn):
-print("Using package: "+sffn)
+print("Using package: " + sffn)
 if opts.linksources:
 os.link(sffn, tffn)
 else:
@@ -1277,19 +1294,19 @@ def main(apiurl, opts, argv):
 for xml in root.findall('repository'):
 if xml.find('source').get('path') == 'obsrepositories:/':
 for path in bi.pathes:
-if not os.path.isdir("repos/"+path):
+if not os.path.isdir("repos/" + path):
 continue
 buildargs.append('--kiwi-parameter')
 buildargs.append('--add-repo')
 buildargs.append('--kiwi-parameter')
-buildargs.append("dir://./repos/"+path)
+buildargs.append("dir://./repos/" + path)
 buildargs.append('--kiwi-parameter')
 buildargs.append('--add-repotype')
 buildargs.append('--kiwi-parameter')
 buildargs.append('rpm-md')
 if xml.get('priority'):
 buildargs.append('--kiwi-parameter')
-buildargs.append('--add-repoprio='+xml.get('priority'))
+buildargs.append('--add-repoprio=' + xml.get('priority'))
 else:
 m = re.match(r"obs://[^/]+/([^/]+)/(\S+)", xml.find('source').get('path'))
 if not m:
@@ -1300,14 +1317,14 @@ def main(apiurl, opts, argv):
 buildargs.append('--kiwi-parameter')
 buildargs.append('--add-repo')
 buildargs.append('--kiwi-parameter')
-buildargs.append("dir://./repos/"+project+"/"+repo)
+buildargs.append("dir://./repos/" + project + "/" + repo)
 buildargs.append('--kiwi-parameter')
 buildargs.append('--add-repotype')
 buildargs.append('--kiwi-parameter')
 buildargs.append('rpm-md')
 if xml.get('priority'):
 buildargs.append('--kiwi-parameter')
-buildargs.append('--add-repopriority='+xml.get('priority'))
+buildargs.append('--add-repopriority=' + xml.get('priority'))
 
 if vm_type == "xen" or vm_type == "kvm" or vm_type == "lxc" or vm_type == "nspawn":
 print('Skipping verification of package signatures due to secure VM build')
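The else-branch above extracts project and repository from an obs:// source path with re.match. A small sketch of how the two capture groups come out, using the same pattern and an invented example path:

    import re

    path = "obs://api.example.org/openSUSE:Factory/standard"   # illustrative value
    m = re.match(r"obs://[^/]+/([^/]+)/(\S+)", path)
    if m:
        project, repo = m.group(1), m.group(2)
        print(project, repo)   # -> openSUSE:Factory standard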
@@ -1329,7 +1346,6 @@ def main(apiurl, opts, argv):
 if i.hdrmd5:
 if not i.name.startswith('container:') and i.pacsuffix != 'rpm':
 continue
-from .util import packagequery
 if i.name.startswith('container:'):
 hdrmd5 = dgst(i.fullfilename)
 else:
@@ -1355,7 +1371,7 @@ def main(apiurl, opts, argv):
 rpmlist.append('%s %s\n' % (dep.name, dep.fullfilename))
 for i in imagebins:
 rpmlist.append('%s preinstallimage\n' % i)
-rpmlist += [ '%s %s\n' % (i[0], i[1]) for i in rpmlist_prefers ]
+rpmlist += ['%s %s\n' % (i[0], i[1]) for i in rpmlist_prefers]
 
 if imagefile:
 rpmlist.append('preinstallimage: %s\n' % imagefile)
@@ -1394,50 +1410,49 @@ def main(apiurl, opts, argv):
 else:
 my_build_swap = build_root + '/swap'
 
-vm_options = [ '--vm-type=%s' % vm_type ]
+vm_options = ['--vm-type=%s' % vm_type]
 if vm_telnet:
-vm_options += [ '--vm-telnet=' + vm_telnet ]
+vm_options += ['--vm-telnet=' + vm_telnet]
 if vm_memory:
-vm_options += [ '--memory=' + vm_memory ]
+vm_options += ['--memory=' + vm_memory]
 if vm_type != 'lxc' and vm_type != 'nspawn':
-vm_options += [ '--vm-disk=' + my_build_device ]
+vm_options += ['--vm-disk=' + my_build_device]
-vm_options += [ '--vm-swap=' + my_build_swap ]
+vm_options += ['--vm-swap=' + my_build_swap]
-vm_options += [ '--logfile=%s/.build.log' % build_root ]
+vm_options += ['--logfile=%s/.build.log' % build_root]
 if vm_type == 'kvm':
 if config['build-kernel']:
-vm_options += [ '--vm-kernel=' + config['build-kernel'] ]
+vm_options += ['--vm-kernel=' + config['build-kernel']]
 if config['build-initrd']:
-vm_options += [ '--vm-initrd=' + config['build-initrd'] ]
+vm_options += ['--vm-initrd=' + config['build-initrd']]
 
 build_root += '/.mount'
 if vm_disk_size:
-vm_options += [ '--vmdisk-rootsize=' + vm_disk_size ]
+vm_options += ['--vmdisk-rootsize=' + vm_disk_size]
 
 if config['build-vmdisk-swapsize']:
-vm_options += [ '--vmdisk-swapsize=' + config['build-vmdisk-swapsize'] ]
+vm_options += ['--vmdisk-swapsize=' + config['build-vmdisk-swapsize']]
 if config['build-vmdisk-filesystem']:
-vm_options += [ '--vmdisk-filesystem=' + config['build-vmdisk-filesystem'] ]
+vm_options += ['--vmdisk-filesystem=' + config['build-vmdisk-filesystem']]
 if config['build-vm-user']:
-vm_options += [ '--vm-user=' + config['build-vm-user'] ]
+vm_options += ['--vm-user=' + config['build-vm-user']]
 
-
 if opts.preload:
 print("Preload done for selected repo/arch.")
 sys.exit(0)
 
 print('Running build')
-cmd = [ config['build-cmd'], '--root='+build_root,
+cmd = [config['build-cmd'], '--root=' + build_root,
-'--rpmlist='+rpmlist_filename,
+'--rpmlist=' + rpmlist_filename,
-'--dist='+bc_filename,
+'--dist=' + bc_filename,
-'--arch='+bi.buildarch ]
+'--arch=' + bi.buildarch]
 cmd += specialcmdopts + vm_options + buildargs
-cmd += [ build_descr ]
+cmd += [build_descr]
 
 cmd = su_wrapper(cmd)
 
 # change personality, if needed
 if hostarch != bi.buildarch and bi.buildarch in change_personality:
-cmd = [ change_personality[bi.buildarch] ] + cmd
+cmd = [change_personality[bi.buildarch]] + cmd
 
 # record our settings for later builds
 if is_package_dir(os.curdir):
@@ -1464,7 +1479,8 @@ def main(apiurl, opts, argv):
 (s_built, b_built) = get_built_files(pacdir, bi.buildtype)
 
 print()
-if s_built: print(decode_it(s_built))
+if s_built:
+print(decode_it(s_built))
 print()
 print(decode_it(b_built))
 
@@ -8,11 +8,13 @@ import rpm
 
 class KeyError(Exception):
 def __init__(self, key, *args):
-Exception.__init__(self)
+super().__init__()
 self.args = args
 self.key = key
+
 def __str__(self):
-return ''+self.key+' :'+' '.join(self.args)
+return '' + self.key + ' :' + ' '.join(self.args)
 
+
 class Checker:
 def __init__(self):
@@ -23,9 +25,10 @@ class Checker:
 self.ts.initDB()
 self.ts.openDB()
 self.ts.setVSFlags(0)
-#self.ts.Debug(1)
+# self.ts.Debug(1)
 
-def readkeys(self, keys=[]):
+def readkeys(self, keys=None):
+keys = keys or []
 rpm.addMacro('_dbpath', self.dbdir)
 for key in keys:
 try:
@@ -33,7 +36,7 @@ class Checker:
 except KeyError as e:
 print(e)
 
-if not len(self.imported):
+if not self.imported:
 raise KeyError('', "no key imported")
 
 rpm.delMacro("_dbpath")
@@ -68,7 +71,7 @@ class Checker:
 if line[0:12] == "-----END PGP":
 break
 line = line.rstrip()
-if (line[0] == '='):
+if line[0] == '=':
 crc = line[1:]
 line = fd.readline()
 break
@@ -100,6 +103,7 @@ class Checker:
 if fd is not None:
 os.close(fd)
 
+
 if __name__ == "__main__":
 import sys
 keyfiles = []
osc/commandline.py (1123 changed lines; diff suppressed because it is too large)

osc/conf.py (48 changed lines)
@@ -36,7 +36,6 @@ The configuration dictionary could look like this:
 """
 
 
-import bz2
 import errno
 import getpass
 import os
@@ -83,6 +82,7 @@ def _identify_osccookiejar():
 
     return os.path.join(osc_state_dir, 'cookiejar')
 
+
 DEFAULTS = {'apiurl': 'https://api.opensuse.org',
             'user': None,
             'pass': None,
@@ -106,7 +106,7 @@ DEFAULTS = {'apiurl': 'https://api.opensuse.org',
             'build-vm-user': '', # optional for VM builds
             'build-kernel': '', # optional for VM builds
             'build-initrd': '', # optional for VM builds
             'download-assets-cmd': '/usr/lib/build/download_assets', # optional for scm/git based builds
 
             'build-jobs': str(_get_processors()),
             'builtin_signature_check': '1', # by default use builtin check for verify pkgs
@@ -178,7 +178,7 @@ DEFAULTS = {'apiurl': 'https://api.opensuse.org',
 
             # heuristic to speedup Package.status
             'status_mtime_heuristic': '0'
             }
 
 # some distros like Debian rename and move build to obs-build
 if not os.path.isfile('/usr/bin/build') and os.path.isfile('/usr/bin/obs-build'):
@@ -187,17 +187,17 @@ if not os.path.isfile('/usr/lib/build/vc') and os.path.isfile('/usr/lib/obs-buil
     DEFAULTS['vc-cmd'] = '/usr/lib/obs-build/vc'
 
 boolean_opts = ['debug', 'do_package_tracking', 'http_debug', 'post_mortem', 'traceback', 'check_filelist',
                 'checkout_no_colon', 'checkout_rooted', 'check_for_request_on_action', 'linkcontrol', 'show_download_progress', 'request_show_interactive',
                 'request_show_source_buildstatus', 'review_inherit_group', 'use_keyring', 'no_verify', 'builtin_signature_check',
                 'http_full_debug', 'include_request_from_project', 'local_service_run', 'buildlog_strip_time', 'no_preinstallimage',
                 'status_mtime_heuristic', 'print_web_links', 'ccache', 'sccache', 'build-shell-after-fail']
 integer_opts = ['build-jobs']
 
 api_host_options = ['user', 'pass', 'passx', 'aliases', 'http_headers', 'realname', 'email', 'sslcertck', 'cafile', 'capath', 'trusted_prj',
                     'downloadurl', 'sshkey']
 
 
-def apply_option_types(config):
+def apply_option_types(config, conffile=""):
     """
     Return a copy of `config` dictionary with values converted to their expected types
     according to the enumerated option types (boolean_opts, integer_opts).
@@ -431,7 +431,7 @@ your credentials for this apiurl.
 def parse_apisrv_url(scheme, apisrv):
     if apisrv.startswith('http://') or apisrv.startswith('https://'):
         url = apisrv
-    elif scheme != None:
+    elif scheme is not None:
         url = scheme + apisrv
     else:
         url = "https://" + apisrv
@@ -493,8 +493,8 @@ def get_apiurl_usr(apiurl):
     try:
         return get_apiurl_api_host_options(apiurl)['user']
     except KeyError:
-        print('no specific section found in config file for host of [\'%s\'] - using default user: \'%s\'' \
+        print('no specific section found in config file for host of [\'%s\'] - using default user: \'%s\''
               % (apiurl, config['user']), file=sys.stderr)
         return config['user']
 
 
@@ -559,7 +559,7 @@ def config_set_option(section, opt, val=None, delete=False, update=True, creds_m
     """
     cp = get_configParser(config['conffile'])
     # don't allow "internal" options
-    general_opts = [i for i in DEFAULTS.keys() if not i in ['user', 'pass', 'passx']]
+    general_opts = [i for i in DEFAULTS.keys() if i not in ['user', 'pass', 'passx']]
     if section != 'general':
         section = config['apiurl_aliases'].get(section, section)
         scheme, host, path = \
@@ -577,10 +577,10 @@ def config_set_option(section, opt, val=None, delete=False, update=True, creds_m
             sections[apiurl] = url
 
     section = sections.get(section.rstrip('/'), section)
-    if not section in cp.sections():
+    if section not in cp.sections():
         raise oscerr.ConfigError('unknown section \'%s\'' % section, config['conffile'])
-    if section == 'general' and not opt in general_opts or \
-            section != 'general' and not opt in api_host_options:
+    if section == 'general' and opt not in general_opts or \
+            section != 'general' and opt not in api_host_options:
         raise oscerr.ConfigError('unknown config option \'%s\'' % opt, config['conffile'])
 
     if not val and not delete and opt == 'pass' and creds_mgr_descr is not None:
@@ -635,6 +635,7 @@ def config_set_option(section, opt, val=None, delete=False, update=True, creds_m
         return (opt, cp.get(section, opt, raw=True))
     return (opt, None)
 
+
 def _extract_user_compat(cp, section, creds_mgr):
     """
     This extracts the user either from the ConfigParser or
@@ -645,6 +646,7 @@ def _extract_user_compat(cp, section, creds_mgr):
         user = creds_mgr.get_user(section)
     return user
 
+
 def write_initial_config(conffile, entries, custom_template='', creds_mgr_descriptor=None):
     """
     write osc's intial configuration file. entries is a dict which contains values
@@ -705,7 +707,7 @@ def _get_credentials_manager(url, cp):
 
 class APIHostOptionsEntry(dict):
     def __getitem__(self, key, *args, **kwargs):
-        value = super(self.__class__, self).__getitem__(key, *args, **kwargs)
+        value = super().__getitem__(key, *args, **kwargs)
         if key == 'pass' and callable(value):
             print('Warning: use of a deprecated credentials manager API.',
                   file=sys.stderr)
@@ -732,7 +734,7 @@ def get_config(override_conffile=None,
 
     conffile = os.path.expanduser(conffile)
     if not os.path.exists(conffile):
-        raise oscerr.NoConfigfile(conffile, \
+        raise oscerr.NoConfigfile(conffile,
                                   account_not_configured_text % conffile)
 
     # okay, we made sure that oscrc exists
@@ -759,7 +761,7 @@ def get_config(override_conffile=None,
     config = dict(cp.items('general', raw=1))
     config['conffile'] = conffile
 
-    config = apply_option_types(config)
+    config = apply_option_types(config, conffile)
 
     config['packagecachedir'] = os.path.expanduser(config['packagecachedir'])
     config['exclude_glob'] = config['exclude_glob'].split()
@@ -831,7 +833,7 @@ def get_config(override_conffile=None,
         if cp.has_option(url, 'build-root', proper=True):
             api_host_options[apiurl]['build-root'] = cp.get(url, 'build-root', raw=True)
 
-        if not 'sslcertck' in api_host_options[apiurl]:
+        if 'sslcertck' not in api_host_options[apiurl]:
            api_host_options[apiurl]['sslcertck'] = True
 
         if 'allow_http' not in api_host_options[apiurl]:
@@ -868,8 +870,8 @@ def get_config(override_conffile=None,
        scheme = config.get('scheme', 'https')
        config['apiurl'] = urljoin(scheme, apisrv)
    if 'apisrc' in config or 'scheme' in config:
-        print('Warning: Use of the \'scheme\' or \'apisrv\' in oscrc is deprecated!\n' \
+        print('Warning: Use of the \'scheme\' or \'apisrv\' in oscrc is deprecated!\n'
              'Warning: See README for migration details.', file=sys.stderr)
    if 'build_platform' in config:
        print('Warning: Use of \'build_platform\' config option is deprecated! (use \'build_repository\' instead)', file=sys.stderr)
        config['build_repository'] = config['build_platform']
@@ -933,6 +935,7 @@ def identify_conf():
 
     return conffile
 
+
 def interactive_config_setup(conffile, apiurl, initial=True):
     scheme = urlsplit(apiurl)[0]
     http = scheme == "http"
@@ -959,6 +962,7 @@ def interactive_config_setup(conffile, apiurl, initial=True):
     else:
         add_section(conffile, apiurl, user, passwd, creds_mgr_descriptor=creds_mgr_descr, allow_http=http)
 
+
 def select_credentials_manager_descr():
     if not credentials.has_keyring_support():
         print('To use keyrings please install python%d-keyring.' % sys.version_info.major)
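Two of the recurring fixes in this file are pylint's singleton-comparison and membership-test warnings: comparing against `None` with `==`/`!=` and writing `not x in y`. The following is only an illustrative sketch with hypothetical values (not osc code) of why the replacements used above are preferred:

```python
class AlwaysEqual:
    # pathological __eq__: equality comparison cannot be trusted to detect None
    def __eq__(self, other):
        return True


scheme = AlwaysEqual()
print(scheme == None)   # True  -> looks like "no scheme" even though one is set
print(scheme is None)   # False -> identity check gives the right answer

opts = ['user', 'pass', 'passx']
print(not 'email' in opts)   # True, but parses as: not ('email' in opts)
print('email' not in opts)   # True, same meaning written as one operator
```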
@@ -44,7 +44,7 @@ class MockRequest:
         return self.headers.get(header_name, default)
 
     def has_header(self, header_name):
-        return (header_name in self.headers)
+        return header_name in self.headers
 
     def add_unredirected_header(self, key, val):
         # modifies the `headers` variable that was passed to object's constructor
@@ -97,9 +97,6 @@ def get_proxy_manager(env):
     else:
         proxy_url = f"{proxy_purl.scheme}://{proxy_purl.host}"
 
-    # import osc.core here to avoid cyclic imports
-    from . import core
-
     proxy_headers = urllib3.make_headers(
         proxy_basic_auth=proxy_purl.auth,
         user_agent=f"osc/{__version__}",
@@ -167,9 +164,6 @@ def http_request(method, url, headers=None, data=None, file=None):
     :param file: Path to a file to send as data in the request body (conflicts with `data`).
     """
 
-    # import osc.core here to avoid cyclic imports
-    from . import core
-
     purl = urllib3.util.parse_url(url)
     apiurl = conf.extract_known_apiurl(url)
     headers = urllib3.response.HTTPHeaderDict(headers or {})
osc/core.py (1121 lines changed; diff suppressed because it is too large)
@@ -137,17 +137,17 @@ class ObfuscatedConfigFileCredentialsManager(PlaintextConfigFileCredentialsManag
         if self._cp.has_option(url, 'passx', proper=True):
             passwd = self._cp.get(url, 'passx', raw=True)
         else:
-            passwd = super(self.__class__, self).get_password(url, user, apiurl=apiurl)
+            passwd = super().get_password(url, user, apiurl=apiurl)
         return self.decode_password(passwd)
 
     def set_password(self, url, user, password):
         compressed_pw = bz2.compress(password.encode('ascii'))
         password = base64.b64encode(compressed_pw).decode("ascii")
-        super(self.__class__, self).set_password(url, user, password)
+        super().set_password(url, user, password)
 
     def delete_password(self, url, user):
         self._cp.remove_option(url, 'passx')
-        super(self.__class__, self).delete_password(url, user)
+        super().delete_password(url, user)
 
     @classmethod
     def decode_password(cls, password):
@@ -174,7 +174,7 @@ class ObfuscatedConfigFileDescriptor(AbstractCredentialsManagerDescriptor):
 
 class TransientCredentialsManager(AbstractCredentialsManager):
     def __init__(self, *args, **kwargs):
-        super(self.__class__, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self._password = None
 
     def _process_options(self, options):
@@ -232,7 +232,7 @@ class KeyringCredentialsManager(AbstractCredentialsManager):
     def create(cls, cp, options):
         if not has_keyring_support():
             return None
-        return super(cls, cls).create(cp, options)
+        return super().create(cp, options)
 
     def _get_password(self, url, user, apiurl=None):
         self._load_backend()
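The `super(self.__class__, self)` calls replaced in this hunk (and throughout osc/oscerr.py below) are more than a style nit: resolving the class at call time breaks as soon as the class is subclassed, because the method lookup restarts at the subclass and recurses. A minimal sketch with hypothetical classes (not from osc) shows the difference:

```python
class Base:
    def __init__(self):
        self.log = ["Base"]


class Broken(Base):
    def __init__(self):
        # self.__class__ is resolved at runtime; for a subclass instance the
        # MRO search restarts below that subclass and lands back here
        super(self.__class__, self).__init__()
        self.log.append("Broken")


class Fixed(Base):
    def __init__(self):
        # zero-argument super() is statically bound to Fixed
        super().__init__()
        self.log.append("Fixed")


class Sub(Fixed):
    def __init__(self):
        super().__init__()
        self.log.append("Sub")


class BrokenSub(Broken):
    pass


print(Sub().log)   # ['Base', 'Fixed', 'Sub']
# BrokenSub() would raise RecursionError: super(self.__class__, ...) keeps
# restarting the lookup from BrokenSub and calling Broken.__init__ again.
```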
osc/fetch.py (25 lines changed)
@@ -6,14 +6,17 @@
 
 import os
 import re
+import shutil
+import subprocess
 import sys
 import tempfile
 from urllib.parse import quote_plus
 from urllib.request import HTTPError
 
+from . import checker as osc_checker
 from . import conf
 from . import oscerr
-from .core import makeurl, streamfile, dgst
+from .core import makeurl, dgst
 from .grabber import OscFileGrabber, OscMirrorGroup
 from .meter import create_text_meter
 from .util import packagequery, cpio
@@ -21,9 +24,9 @@ from .util.helper import decode_it
 
 
 class Fetcher:
-    def __init__(self, cachedir='/tmp', api_host_options={}, urllist=[],
+    def __init__(self, cachedir='/tmp', urllist=None,
                  http_debug=False, cookiejar=None, offline=False,
-                 enable_cpio=True, modules=[], download_api_only=False):
+                 enable_cpio=True, modules=None, download_api_only=False):
         # set up progress bar callback
         self.progress_obj = None
         if sys.stdout.isatty():
@@ -31,8 +34,8 @@ class Fetcher:
 
         self.cachedir = cachedir
         # generic download URL lists
-        self.urllist = urllist
-        self.modules = modules
+        self.urllist = urllist or []
+        self.modules = modules or []
         self.http_debug = http_debug
         self.offline = offline
         self.cpio = {}
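The `Fetcher.__init__` change above is the standard fix for pylint's dangerous-default-value warning (W0102): a mutable default such as `urllist=[]` is created once at function definition time and shared across every call. A small sketch (hypothetical function, not osc code) shows the failure mode and the `None` plus `or []` idiom used here:

```python
def broken_fetch_queue(pkg, queue=[]):
    # the same list object is reused by every call that relies on the default
    queue.append(pkg)
    return queue


def fixed_fetch_queue(pkg, queue=None):
    # a fresh list is created per call, mirroring "urllist or []" above
    queue = queue or []
    queue.append(pkg)
    return queue


print(broken_fetch_queue("a"))  # ['a']
print(broken_fetch_queue("b"))  # ['a', 'b']  <- state leaked between calls
print(fixed_fetch_queue("a"))   # ['a']
print(fixed_fetch_queue("b"))   # ['b']
```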
@@ -135,7 +138,7 @@ class Fetcher:
             with tempfile.NamedTemporaryFile(prefix='osc_build',
                                              delete=False) as tmpfile:
                 mg_stat = mg.urlgrab(pac.filename, filename=tmpfile.name,
                                      text='%s(%s) %s' % (prefix, pac.project, pac.filename))
                 if mg_stat:
                     self.move_package(tmpfile.name, pac.localdir, pac)
 
@@ -155,7 +158,6 @@ class Fetcher:
                 os.unlink(tmpfile.name)
 
     def move_package(self, tmpfile, destdir, pac_obj=None):
-        import shutil
         canonname = None
         if pac_obj and pac_obj.name.startswith('container:'):
             canonname = pac_obj.canonname
@@ -258,7 +260,7 @@ class Fetcher:
                 hdrmd5 = packagequery.PackageQuery.queryhdrmd5(i.fullfilename)
                 if not hdrmd5 or hdrmd5 != i.hdrmd5:
                     print('%s/%s: attempting download from api, since the hdrmd5 did not match - %s != %s'
                           % (i.project, i.name, hdrmd5, i.hdrmd5))
                     os.unlink(i.fullfilename)
                     self.__add_cpio(i)
 
@@ -324,8 +326,6 @@ def verify_pacs_old(pac_list):
     Check all packages in one go, since this takes only 6 seconds on my Athlon 700
     instead of 20 when calling 'rpm -K' for each of them.
     """
-    import subprocess
-
     if not pac_list:
         return
 
@@ -374,7 +374,7 @@ def verify_pacs_old(pac_list):
 
 - You may use --no-verify to skip the verification (which is a risk for your system).
 """ % {'name': missing_key,
        'dir': os.path.expanduser('~')}, file=sys.stderr)
 
         else:
             print("""
@@ -403,9 +403,8 @@ def verify_pacs(bi):
 
     print("using keys from", ', '.join(bi.prjkeys))
 
-    from . import checker
     failed = False
-    checker = checker.Checker()
+    checker = osc_checker.Checker()
     try:
         checker.readkeys(bi.keys)
         for pkg in pac_list:
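The last hunk also explains why `from . import checker as osc_checker` was added at the top of osc/fetch.py: the function previously rebound the local name `checker` to a `Checker` instance, shadowing the module it had just imported. The old code happened to work because the module was not needed again, but the shadowing trips pylint and makes the module unreachable for the rest of the function. A hedged sketch of the same pattern, using the standard-library `json` module as a stand-in:

```python
import json as json_mod  # module-level import under an alias, like osc_checker


def parse_once(payload):
    import json                 # function-local import, as the old verify_pacs did
    json = json.loads(payload)  # rebinds "json"; the module is no longer reachable here
    return json


def parse_twice(payload):
    first = json_mod.loads(payload)
    second = json_mod.loads(payload)  # the module name is still usable
    return first, second


print(parse_once('{"a": 1}'))
print(parse_twice('{"a": 1}'))
```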
@@ -5,7 +5,6 @@
 
 
 import os
-import sys
 from urllib.request import HTTPError
 from urllib.parse import urlparse
 from urllib.parse import unquote
@@ -62,6 +62,7 @@ def create_text_meter(*args, **kwargs):
         return TextMeter(*args, **kwargs)
     return None
 
+
 if have_pb_module:
     TextMeter = PBTextMeter
 else:
@@ -4,120 +4,148 @@
 # either version 2, or (at your option) any later version.
 
 
 
 class OscBaseError(Exception):
     def __init__(self, args=()):
-        Exception.__init__(self)
+        super().__init__()
         self.args = args
 
     def __str__(self):
         return ''.join(self.args)
 
 
 class UserAbort(OscBaseError):
     """Exception raised when the user requested abortion"""
 
 
 class ConfigError(OscBaseError):
     """Exception raised when there is an error in the config file"""
 
     def __init__(self, msg, fname):
-        OscBaseError.__init__(self)
+        super().__init__()
         self.msg = msg
         self.file = fname
 
     def __str__(self):
         return f"Error in config file {self.file}\n {self.msg}"
 
 
 class ConfigMissingApiurl(ConfigError):
     """Exception raised when a apiurl does not exist in the config file"""
 
     def __init__(self, msg, fname, url):
-        ConfigError.__init__(self, msg, fname)
+        super().__init__(msg, fname)
         self.url = url
 
 
 class ConfigMissingCredentialsError(ConfigError):
     def __init__(self, msg, fname, url):
-        ConfigError.__init__(self, msg, fname)
+        super().__init__(msg, fname)
         self.url = url
 
 
 class APIError(OscBaseError):
     """Exception raised when there is an error in the output from the API"""
 
     def __init__(self, msg):
-        OscBaseError.__init__(self)
+        super().__init__()
         self.msg = msg
 
 
 class NoConfigfile(OscBaseError):
     """Exception raised when osc's configfile cannot be found"""
 
     def __init__(self, fname, msg):
-        OscBaseError.__init__(self)
+        super().__init__()
         self.file = fname
         self.msg = msg
 
     def __str__(self):
         return f"Config file cannot be found: {self.file}\n {self.msg}"
 
 
 class ExtRuntimeError(OscBaseError):
     """Exception raised when there is a runtime error of an external tool"""
 
     def __init__(self, msg, fname):
-        OscBaseError.__init__(self)
+        super().__init__()
         self.msg = msg
         self.file = fname
 
 
 class ServiceRuntimeError(OscBaseError):
     """Exception raised when the execution of a source service failed"""
 
     def __init__(self, msg):
-        OscBaseError.__init__(self)
+        super().__init__()
         self.msg = msg
 
 
 class WrongArgs(OscBaseError):
     """Exception raised by the cli for wrong arguments usage"""
 
 
 class WrongOptions(OscBaseError):
     """Exception raised by the cli for wrong option usage"""
-    #def __str__(self):
+    # def __str__(self):
     #    s = 'Sorry, wrong options.'
     #    if self.args:
     #        s += '\n' + self.args
     #    return s
 
 
 class NoWorkingCopy(OscBaseError):
     """Exception raised when directory is neither a project dir nor a package dir"""
 
 
 class NotMissing(OscBaseError):
     """Exception raised when link target should not exist, but it does"""
 
 
 class WorkingCopyWrongVersion(OscBaseError):
     """Exception raised when working copy's .osc/_osclib_version doesn't match"""
 
 
 class WorkingCopyOutdated(OscBaseError):
     """Exception raised when the working copy is outdated.
     It takes a tuple with three arguments: path to wc,
     revision that it has, revision that it should have.
     """
 
     def __str__(self):
         return ('Working copy \'%s\' is out of date (rev %s vs rev %s).\n'
-                'Looks as if you need to update it first.' \
+                'Looks as if you need to update it first.'
                 % (self.args[0], self.args[1], self.args[2]))
 
 
 class PackageError(OscBaseError):
     """Base class for all Package related exceptions"""
 
     def __init__(self, prj, pac):
-        OscBaseError.__init__(self)
+        super().__init__()
         self.prj = prj
         self.pac = pac
 
 
 class WorkingCopyInconsistent(PackageError):
     """Exception raised when the working copy is in an inconsistent state"""
 
     def __init__(self, prj, pac, dirty_files, msg):
-        PackageError.__init__(self, prj, pac)
+        super().__init__(prj, pac)
         self.dirty_files = dirty_files
         self.msg = msg
 
 
 class LinkExpandError(PackageError):
     """Exception raised when source link expansion fails"""
 
     def __init__(self, prj, pac, msg):
-        PackageError.__init__(self, prj, pac)
+        super().__init__(prj, pac)
         self.msg = msg
 
 
 class OscIOError(OscBaseError):
     def __init__(self, e, msg):
-        OscBaseError.__init__(self)
+        super().__init__()
         self.e = e
         self.msg = msg
 
@@ -133,6 +161,7 @@ class OscInvalidRevision(OscValueError):
     """
     Invalid revision value.
     """
 
     def __str__(self):
         return "Invalid revision value: {}".format("".join(self.args))
 
@@ -141,43 +170,52 @@ class PackageNotInstalled(OscBaseError):
     """
     Exception raised when a package is not installed on local system
     """
 
     def __init__(self, pkg):
-        OscBaseError.__init__(self, (pkg,))
+        super().__init__((pkg,))
 
     def __str__(self):
         return 'Package %s is required for this operation' % self.args
 
 
 class SignalInterrupt(Exception):
     """Exception raised on SIGTERM and SIGHUP."""
 
 
 class PackageExists(PackageError):
     """
     Exception raised when a local object already exists
     """
 
     def __init__(self, prj, pac, msg):
-        PackageError.__init__(self, prj, pac)
+        super().__init__(prj, pac)
         self.msg = msg
 
 
 class PackageMissing(PackageError):
     """
     Exception raised when a local object doesn't exist
     """
 
     def __init__(self, prj, pac, msg):
-        PackageError.__init__(self, prj, pac)
+        super().__init__(prj, pac)
         self.msg = msg
 
 
 class PackageFileConflict(PackageError):
     """
     Exception raised when there's a file conflict.
     Conflict doesn't mean an unsuccessfull merge in this context.
     """
 
     def __init__(self, prj, pac, file, msg):
-        PackageError.__init__(self, prj, pac)
+        super().__init__(prj, pac)
         self.file = file
         self.msg = msg
 
 
 class PackageInternalError(PackageError):
     def __init__(self, prj, pac, msg):
-        PackageError.__init__(self, prj, pac)
+        super().__init__(prj, pac)
         self.msg = msg
 # vim: sw=4 et
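Several hunks in this file and in osc/conf.py drop a trailing backslash before a continued string. Inside parentheses Python continues the line anyway, and adjacent string literals are concatenated by the parser, so the backslash is dead weight that pylint flags as a useless continuation. A short, self-contained illustration (not osc code, modeled on the `WorkingCopyOutdated` message above):

```python
def outdated_message(path, have, want):
    # the two adjacent literals are joined at compile time; no backslash needed
    return ('Working copy %r is out of date (rev %s vs rev %s).\n'
            'Looks as if you need to update it first.'
            % (path, have, want))


print(outdated_message('wc', 3, 5))
```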
@@ -1,14 +1,10 @@
 import binascii
-import datetime
-import errno
 import os
 import socket
 import ssl
-import sys
 import tempfile
 import typing
 
-import urllib3.connection
 from cryptography import x509
 from cryptography.hazmat.primitives import hashes
 from cryptography.hazmat.primitives import serialization
@@ -28,7 +28,7 @@ if not hasattr(os, 'SEEK_SET'):
 class ArError(Exception):
     """Base class for all ar related errors"""
     def __init__(self, fn, msg):
-        Exception.__init__(self)
+        super().__init__()
         self.file = fn
         self.msg = msg
 
@@ -57,7 +57,7 @@ class ArHdr:
 class ArFile(BytesIO):
     """Represents a file which resides in the archive"""
     def __init__(self, fn, uid, gid, mode, buf):
-        BytesIO.__init__(self, buf)
+        super().__init__(buf)
         self.name = fn
         self.uid = uid
         self.gid = gid
@@ -97,9 +97,9 @@ class Ar:
                      re.DOTALL)
 
     def __init__(self, fn = None, fh = None):
-        if fn == None and fh == None:
-            raise ValueError('either \'fn\' or \'fh\' must be != None')
-        if fh != None:
+        if fn is None and fh is None:
+            raise ValueError('either \'fn\' or \'fh\' must be is not None')
+        if fh is not None:
             self.__file = fh
             self.__closefile = False
             self.filename = fh.name
@@ -169,7 +169,7 @@ class Ar:
         if data != b'!<arch>':
             raise ArError(self.filename, 'no ar archive')
         pos = 8
-        while (len(data) != 0):
+        while len(data) != 0:
             self.__file.seek(pos, os.SEEK_SET)
             data = self.__file.read(self.hdr_len)
             if not data:
@@ -1,7 +1,7 @@
 import os
 import re
 import subprocess
-import tarfile
+import sys
 
 from . import packagequery
 
@@ -51,14 +51,14 @@ class ArchQuery(packagequery.PackageQuery, packagequery.PackageQueryResult):
 
     def version(self):
         pkgver = self.fields['pkgver'][0] if 'pkgver' in self.fields else None
-        if pkgver != None:
+        if pkgver is not None:
             pkgver = re.sub(br'[0-9]+:', b'', pkgver, 1)
             pkgver = re.sub(br'-[^-]*$', b'', pkgver)
         return pkgver
 
     def release(self):
         pkgver = self.fields['pkgver'][0] if 'pkgver' in self.fields else None
-        if pkgver != None:
+        if pkgver is not None:
             m = re.search(br'-([^-])*$', pkgver)
             if m:
                 return m.group(1)
@@ -207,7 +207,6 @@ class ArchQuery(packagequery.PackageQuery, packagequery.PackageQueryResult):
 
 
 if __name__ == '__main__':
-    import sys
     archq = ArchQuery.query(sys.argv[1])
     print(archq.name(), archq.version(), archq.release(), archq.arch())
     try:
@@ -14,7 +14,6 @@
 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
 
 
-import mmap
 import os
 import stat
 import struct
@@ -30,7 +29,7 @@ if not hasattr(os, 'SEEK_SET'):
 class CpioError(Exception):
     """base class for all cpio related errors"""
     def __init__(self, fn, msg):
-        Exception.__init__(self)
+        super().__init__()
         self.file = fn
         self.msg = msg
     def __str__(self):
@@ -153,7 +152,7 @@ class CpioRead:
         if not self.format in self.sfmt.values():
             raise CpioError(self.filename, '\'%s\' is not a supported cpio format' % self.format)
         pos = 0
-        while (len(data) != 0):
+        while len(data) != 0:
             self.__file.seek(pos, os.SEEK_SET)
             data = self.__file.read(self.hdr_len)
             if not data:
@@ -1,6 +1,7 @@
 import itertools
 import os
 import re
+import sys
 import tarfile
 from io import BytesIO
 
@@ -58,11 +59,11 @@ class DebQuery(packagequery.PackageQuery, packagequery.PackageQueryResult):
             else:
                 control = arfile.get_file(b'control.tar.xz')
                 if control:
                     if not HAVE_LZMA:
                         raise DebError(self.__path, 'can\'t open control.tar.xz without python-lzma')
                     decompressed = lzma.decompress(control.read())
                     tar = tarfile.open(name="control.tar.xz",
                                        fileobj=BytesIO(decompressed))
                 else:
                     control = arfile.get_file(b'control.tar.zst')
                     if control:
@@ -249,7 +250,6 @@ class DebQuery(packagequery.PackageQuery, packagequery.PackageQueryResult):
         return b'%s_%s_%s.deb' % (name, version, arch)
 
 
 if __name__ == '__main__':
-    import sys
     try:
         debq = DebQuery.query(sys.argv[1])
     except DebError as e:
@@ -4,6 +4,7 @@
 # either version 2, or (at your option) any later version.
 
 
+import builtins
 import html
 
 from .. import oscerr
@@ -39,7 +40,6 @@ def decode_it(obj):
 
 
 def raw_input(*args):
-    import builtins
     func = builtins.input
 
     try:
@@ -1,10 +1,12 @@
+import sys
+
 from .helper import decode_it
 
 
 class PackageError(Exception):
     """base class for all package related errors"""
     def __init__(self, fname, msg):
-        Exception.__init__(self)
+        super().__init__()
         self.fname = fname
         self.msg = msg
 
@@ -166,7 +168,6 @@ def cmp(a, b):
 
 
 if __name__ == '__main__':
-    import sys
     try:
         pkgq = PackageQuery.query(sys.argv[1])
     except PackageError as e:
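The same pattern repeats across osc/util: `import sys` (and similar) moves from inside `if __name__ == '__main__':` blocks or function bodies to the top of the module, which is what pylint's import-outside-toplevel and wrong-import-position checks ask for. A minimal sketch of the resulting module shape (hypothetical module, not osc code):

```python
import sys  # previously imported inside the __main__ block


def main(argv):
    # module-level import is visible to every function and to static analysis
    print("querying", argv[1:])


if __name__ == '__main__':
    main(sys.argv)
```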
@@ -1,6 +1,7 @@
 import os
 import re
 import struct
+import sys
 from . import packagequery
 
 from .helper import decode_it
@@ -385,7 +386,6 @@ def unpack_string(data, encoding=None):
     return data
 
 
 if __name__ == '__main__':
-    import sys
     try:
         rpmq = RpmQuery.query(sys.argv[1])
     except RpmError as e:
@@ -32,8 +32,8 @@ def urlcompare(url, *args):
     query_args2 = parse_qs(components2.query)
     components2 = components2._replace(query=None)
 
     if components != components2 or \
             query_args != query_args2:
         return False
 
     return True
@@ -64,8 +64,9 @@ def xml_equal(actual, exp):
 
 class RequestWrongOrder(Exception):
     """raised if an unexpected request is issued to urllib2"""
+
     def __init__(self, url, exp_url, method, exp_method):
-        Exception.__init__(self)
+        super().__init__()
         self.url = url
         self.exp_url = exp_url
         self.method = method
@@ -74,8 +75,10 @@ class RequestWrongOrder(Exception):
     def __str__(self):
         return '%s, %s, %s, %s' % (self.url, self.exp_url, self.method, self.exp_method)
 
+
 class RequestDataMismatch(Exception):
     """raised if POSTed or PUTed data doesn't match with the expected data"""
+
     def __init__(self, url, got, exp):
         self.url = url
         self.got = got
@@ -91,7 +94,7 @@ EXPECTED_REQUESTS = []
 # HACK: Fix "ValueError: I/O operation on closed file." error in tests on openSUSE Leap 15.2.
 # The problem seems to appear only in the tests, possibly some interaction with MockHTTPConnectionPool.
 # Porting 753fbc03 to urllib3 in openSUSE Leap 15.2 would fix the problem.
-urllib3.response.HTTPResponse.__iter__ = lambda self : iter(self._fp)
+urllib3.response.HTTPResponse.__iter__ = lambda self: iter(self._fp)
 
 
 class MockHTTPConnectionPool:
@@ -290,7 +293,7 @@ class OscTestCase(unittest.TestCase):
             self.assertEqual(got, exp)
         else:
             start_delim = "\n" + (" 8< ".join(["-----"] * 8)) + "\n"
             end_delim = "\n" + (" >8 ".join(["-----"] * 8)) + "\n\n"
             self.assertEqual(got, exp,
                              "got:" + start_delim + got + end_delim +
                              "expected:" + start_delim + exp + end_delim)
@@ -1,5 +1,6 @@
 import os
 import sys
+import unittest
 
 import osc.core
 import osc.oscerr
@@ -9,10 +10,11 @@ from .common import OscTestCase
 
 FIXTURES_DIR = os.path.join(os.path.dirname(__file__), 'addfile_fixtures')
 
+
 def suite():
-    import unittest
     return unittest.defaultTestLoader.loadTestsFromTestCase(TestAddFiles)
 
+
 class TestAddFiles(OscTestCase):
     def _get_fixtures_dir(self):
         return FIXTURES_DIR
@@ -83,6 +85,6 @@ class TestAddFiles(OscTestCase):
         self.assertRaises(osc.oscerr.OscIOError, p.addfile, 'doesnotexist')
         self.assertFalse(os.path.exists(os.path.join('.osc', '_to_be_added')))
 
+
 if __name__ == '__main__':
-    import unittest
     unittest.main()
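All of the test modules below get the same treatment: one `import unittest` at the top of the file replaces the copies that used to live inside `suite()` and the `__main__` guard. The resulting skeleton, sketched here with a placeholder `TestExample` case (not an actual osc test):

```python
import unittest


def suite():
    # the single top-level import serves both the loader and unittest.main()
    return unittest.defaultTestLoader.loadTestsFromTestCase(TestExample)


class TestExample(unittest.TestCase):
    def test_truth(self):
        self.assertTrue(True)


if __name__ == '__main__':
    unittest.main()
```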
@@ -1,5 +1,6 @@
 import os
 import sys
+import unittest
 from urllib.error import HTTPError
 from xml.etree import ElementTree as ET
 
@@ -11,12 +12,14 @@ from .common import GET, PUT, POST, DELETE, OscTestCase
 
 FIXTURES_DIR = os.path.join(os.path.dirname(__file__), 'commit_fixtures')
 
+
 def suite():
-    import unittest
     return unittest.defaultTestLoader.loadTestsFromTestCase(TestCommit)
 
+
 rev_dummy = '<revision rev="repository">\n <srcmd5>empty</srcmd5>\n</revision>'
 
+
 class TestCommit(OscTestCase):
     def _get_fixtures_dir(self):
         return FIXTURES_DIR
@@ -27,7 +30,7 @@ class TestCommit(OscTestCase):
     @POST('http://localhost/source/osctest/simple?comment=&cmd=commitfilelist&user=Admin&withvalidate=1',
           file='testSimple_missingfilelist', expfile='testSimple_lfilelist')
     @PUT('http://localhost/source/osctest/simple/nochange?rev=repository',
          exp='This file didn\'t change but\nis modified.\n', text=rev_dummy)
     @POST('http://localhost/source/osctest/simple?comment=&cmd=commitfilelist&user=Admin',
           file='testSimple_cfilesremote', expfile='testSimple_lfilelist')
     @GET('http://localhost/search/request?match=%28state%2F%40name%3D%27new%27+or+state%2F%40name%3D%27review%27%29+and+%28action%2Ftarget%2F%40project%3D%27osctest%27+or+action%2Fsource%2F%40project%3D%27osctest%27%29+and+%28action%2Ftarget%2F%40package%3D%27simple%27+or+action%2Fsource%2F%40package%3D%27simple%27%29', file='testOpenRequests')
@@ -183,7 +186,7 @@ class TestCommit(OscTestCase):
     @POST('http://localhost/source/osctest/simple?comment=&cmd=commitfilelist&user=Admin&withvalidate=1',
           file='testSimple_missingfilelist', expfile='testSimple_lfilelist')
     @PUT('http://localhost/source/osctest/simple/nochange?rev=repository', exp='This file didn\'t change but\nis modified.\n',
          exception=IOError('test exception'), text=rev_dummy)
     def test_interrupt(self):
         """interrupt a commit"""
         self._change_to_pkg('simple')
@@ -307,7 +310,7 @@ class TestCommit(OscTestCase):
     @POST('http://localhost/source/osctest/simple?comment=&cmd=commitfilelist&user=Admin&withvalidate=1',
           file='testSimple_missingfilelist', expfile='testSimple_lfilelist')
     @PUT('http://localhost/source/osctest/simple/nochange?rev=repository',
          exp='This file didn\'t change but\nis modified.\n', text=rev_dummy)
     @POST('http://localhost/source/osctest/simple?comment=&cmd=commitfilelist&user=Admin',
           expfile='testSimple_lfilelist', text='an error occured', code=500)
     def test_commitfilelist_error(self):
@@ -328,7 +331,7 @@ class TestCommit(OscTestCase):
     @POST('http://localhost/source/osctest/simple?comment=&cmd=commitfilelist&user=Admin',
           file='testSimple_missingfilelistwithSHAsum', expfile='testSimple_lfilelistwithSHA')
     @PUT('http://localhost/source/osctest/simple/nochange?rev=repository',
          exp='This file didn\'t change but\nis modified.\n', text=rev_dummy)
     @POST('http://localhost/source/osctest/simple?comment=&cmd=commitfilelist&user=Admin',
           file='testSimple_cfilesremote', expfile='testSimple_lfilelistwithSHA')
     @GET('http://localhost/search/request?match=%28state%2F%40name%3D%27new%27+or+state%2F%40name%3D%27review%27%29+and+%28action%2Ftarget%2F%40project%3D%27osctest%27+or+action%2Fsource%2F%40project%3D%27osctest%27%29+and+%28action%2Ftarget%2F%40package%3D%27simple%27+or+action%2Fsource%2F%40package%3D%27simple%27%29', file='testOpenRequests')
@@ -368,6 +371,6 @@ class TestCommit(OscTestCase):
         self._check_status(p, 'add', '!')
         self._check_status(p, 'bar', ' ')
 
-if __name__ == '__main__':
-    import unittest
+
+if __name__ == '__main__':
     unittest.main()
@@ -48,5 +48,4 @@ class TestParseRevisionOption(unittest.TestCase):
 
 
 if __name__ == "__main__":
-    import unittest
     unittest.main()
@@ -1,4 +1,5 @@
 import os
+import unittest
 
 import osc.core
 import osc.oscerr
@@ -8,10 +9,11 @@ from .common import OscTestCase
 
 FIXTURES_DIR = os.path.join(os.path.dirname(__file__), 'deletefile_fixtures')
 
+
 def suite():
-    import unittest
     return unittest.defaultTestLoader.loadTestsFromTestCase(TestDeleteFiles)
 
+
 class TestDeleteFiles(OscTestCase):
     def _get_fixtures_dir(self):
         return FIXTURES_DIR
@@ -205,6 +207,6 @@ class TestDeleteFiles(OscTestCase):
         self.assertTrue(ret[0] == exp1)
         self.assertTrue(ret[1] == exp2)
 
+
 if __name__ == '__main__':
-    import unittest
     unittest.main()
@@ -1,5 +1,6 @@
 import os
 import re
+import unittest
 
 import osc.core
 import osc.oscerr
@@ -10,12 +11,14 @@ from .common import GET, OscTestCase
 
 FIXTURES_DIR = os.path.join(os.path.dirname(__file__), 'difffile_fixtures')
 
+
 def suite():
-    import unittest
     return unittest.defaultTestLoader.loadTestsFromTestCase(TestDiffFiles)
 
+
 class TestDiffFiles(OscTestCase):
     diff_hdr = 'Index: %s\n==================================================================='
 
     def _get_fixtures_dir(self):
         return FIXTURES_DIR
 
@@ -335,6 +338,6 @@ Binary file 'binary' has changed.
         exp = __canonise_diff(exp)
         self.assertEqualMultiline(got, exp)
 
+
 if __name__ == '__main__':
-    import unittest
     unittest.main()
@@ -6,6 +6,7 @@ from osc.util.helper import decode_it, decode_list
 def suite():
     return unittest.defaultTestLoader.loadTestsFromTestCase(TestResults)
 
+
 class TestResults(unittest.TestCase):
     def testDecodeList(self):
         strlist = ['Test1', 'Test2', 'Test3']
@@ -21,7 +22,6 @@ class TestResults(unittest.TestCase):
         out = decode_list(byteslist)
         self.assertListEqual(out, strlist)
 
-
     def testDecodeIt(self):
         bytes_obj = b'Test the decoding'
         string_obj = 'Test the decoding'
@@ -32,6 +32,6 @@ class TestResults(unittest.TestCase):
         out = decode_it(string_obj)
         self.assertEqual(out, string_obj)
 
+
 if __name__ == '__main__':
     unittest.main()
-
|
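The assertions in the hunks above pin down the behaviour of osc.util.helper.decode_it and decode_list: bytes decode to str, an existing str passes through unchanged, and decode_list applies the conversion element-wise. A small usage sketch, assuming only that osc is importable (the printed values mirror the expectations in these tests):

from osc.util.helper import decode_it, decode_list

print(decode_it(b'Test the decoding'))            # -> 'Test the decoding'
print(decode_it('Test the decoding'))             # str passes through unchanged
print(decode_list([b'Test1', b'Test2', b'Test3']))  # -> ['Test1', 'Test2', 'Test3']
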
@ -1,4 +1,5 @@
import os
+import unittest

import osc.core
import osc.oscerr

@ -8,10 +9,11 @@ from .common import OscTestCase

FIXTURES_DIR = os.path.join(os.path.dirname(__file__), 'init_package_fixtures')

+
def suite():
-    import unittest
    return unittest.defaultTestLoader.loadTestsFromTestCase(TestInitPackage)

+
class TestInitPackage(OscTestCase):
    def _get_fixtures_dir(self):
        # workaround for git because it doesn't allow empty dirs

@ -22,7 +24,7 @@ class TestInitPackage(OscTestCase):
    def tearDown(self):
        if os.path.exists(os.path.join(FIXTURES_DIR, 'osctest')):
            os.rmdir(os.path.join(FIXTURES_DIR, 'osctest'))
-        OscTestCase.tearDown(self)
+        super().tearDown()

    def test_simple(self):
        """initialize a package dir"""

@ -88,6 +90,6 @@ class TestInitPackage(OscTestCase):
        f.write('foo\n')
        self.assertRaises(osc.oscerr.OscIOError, osc.core.Package.init_package, 'http://localhost', 'osctest', 'testpkg', pac_dir)

+
if __name__ == '__main__':
-    import unittest
    unittest.main()

@ -1,5 +1,7 @@
import os
+import unittest

+import osc.conf
import osc.core
import osc.oscerr

@ -8,10 +10,11 @@ from .common import GET, OscTestCase

FIXTURES_DIR = os.path.join(os.path.dirname(__file__), 'init_project_fixtures')

+
def suite():
-    import unittest
    return unittest.defaultTestLoader.loadTestsFromTestCase(TestInitProject)

+
class TestInitProject(OscTestCase):
    def _get_fixtures_dir(self):
        # workaround for git because it doesn't allow empty dirs

@ -22,7 +25,7 @@ class TestInitProject(OscTestCase):
    def tearDown(self):
        if os.path.exists(os.path.join(FIXTURES_DIR, 'osctest')):
            os.rmdir(os.path.join(FIXTURES_DIR, 'osctest'))
-        OscTestCase.tearDown(self)
+        super().tearDown()

    def test_simple(self):
        """initialize a project dir"""

@ -58,7 +61,6 @@ class TestInitProject(OscTestCase):
        disable wc_check (because we didn't disable the package tracking before the Project class
        was imported therefore REQ_STOREFILES contains '_packages')
        """
-        import osc.conf
        # disable package tracking
        osc.conf.config['do_package_tracking'] = False
        prj_dir = os.path.join(self.tmpdir, 'testprj')

@ -70,6 +72,6 @@ class TestInitProject(OscTestCase):
        self._check_list(os.path.join(storedir, '_apiurl'), 'http://localhost\n')
        self.assertFalse(os.path.exists(os.path.join(storedir, '_packages')))

+
if __name__ == '__main__':
-    import unittest
    unittest.main()

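Besides the import moves, these hunks replace explicit base-class calls such as OscTestCase.setUp(self, ...) and OscTestCase.tearDown(self) with super(). A compact, runnable illustration of that pattern follows; BaseCase is a hypothetical stand-in for tests/common.py's OscTestCase, not the real fixture class.

import unittest


class BaseCase(unittest.TestCase):
    """Stand-in for OscTestCase: setUp() takes a keyword the subclasses forward."""

    def setUp(self, copytree=True):
        self.copytree = copytree

    def tearDown(self):
        pass


class ExampleCase(BaseCase):
    def setUp(self):
        # was: BaseCase.setUp(self, copytree=False)
        super().setUp(copytree=False)

    def tearDown(self):
        # was: BaseCase.tearDown(self)
        super().tearDown()

    def test_flag(self):
        self.assertFalse(self.copytree)


if __name__ == '__main__':
    unittest.main()
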
@ -1,4 +1,5 @@
import os
+import unittest

import osc.core
import osc.oscerr

@ -8,10 +9,11 @@ from .common import OscTestCase

FIXTURES_DIR = os.path.join(os.path.dirname(__file__), 'project_package_status_fixtures')

+
def suite():
-    import unittest
    return unittest.defaultTestLoader.loadTestsFromTestCase(TestPackageStatus)

+
class TestPackageStatus(OscTestCase):
    def _get_fixtures_dir(self):
        return FIXTURES_DIR

@ -21,7 +23,7 @@ class TestPackageStatus(OscTestCase):
        self._change_to_pkg('simple')
        p = osc.core.Package('.')
        exp_st = [('A', 'add'), ('?', 'exists'), ('D', 'foo'), ('!', 'merge'), ('R', 'missing'),
                  ('!', 'missing_added'), ('M', 'nochange'), ('S', 'skipped'), (' ', 'test')]
        st = p.get_status()
        self.assertEqual(exp_st, st)

@ -84,6 +86,6 @@ class TestPackageStatus(OscTestCase):
        st = p.get_status(True)
        self.assertEqual(exp_st, st)

+
if __name__ == '__main__':
-    import unittest
    unittest.main()

@ -1,6 +1,7 @@
import os
import re
import sys
+import unittest

import osc.commandline
import osc.core

@ -11,12 +12,14 @@ from .common import GET, POST, OscTestCase, EXPECTED_REQUESTS

FIXTURES_DIR = os.path.join(os.path.dirname(__file__), 'prdiff_fixtures')
UPSTREAM = 'some:project'
BRANCH = 'home:user:branches:' + UPSTREAM

+
def rdiff_url(pkg, oldprj, newprj):
    return 'http://localhost/source/%s/%s?unified=1&opackage=%s&oproject=%s&cmd=diff&expand=1&tarlimit=0&filelimit=0' % \
        (newprj, pkg, pkg, oldprj.replace(':', '%3A'))

+
def request_url(prj):
    return 'http://localhost/search/request?match=%%28state%%2F%%40name%%3D%%27new%%27+or+state%%2F%%40name%%3D%%27review%%27%%29+and+%%28action%%2Ftarget%%2F%%40project%%3D%%27%s%%27+or+action%%2Fsource%%2F%%40project%%3D%%27%s%%27%%29' % \
        tuple([prj.replace(':', '%3A')] * 2)

@ -42,11 +45,12 @@ def POST_RDIFF(oldprj, newprj):

+
def suite():
-    import unittest
    return unittest.defaultTestLoader.loadTestsFromTestCase(TestProjectDiff)

+
class TestProjectDiff(OscTestCase):
    diff_hdr = 'Index: %s\n==================================================================='

    def _get_fixtures_dir(self):
        return FIXTURES_DIR

@ -61,13 +65,11 @@ class TestProjectDiff(OscTestCase):
        cli.main(argv=argv)
        return sys.stdout.getvalue()

-
    def testPrdiffTooManyArgs(self):
        def runner():
            self._run_prdiff('one', 'two', 'superfluous-arg')
        self.assertRaises(osc.oscerr.WrongArgs, runner)

-
    @GET_PROJECT_PACKAGES(UPSTREAM, BRANCH)
    @POST_RDIFF(UPSTREAM, BRANCH)
    @POST(rdiff_url('only-in-new', UPSTREAM, BRANCH), exp='', text='')

@ -77,6 +79,7 @@ differs: common-two
identical: common-three
identical: only-in-new
"""

        def runner():
            self._run_prdiff()

@ -90,7 +93,6 @@ identical: only-in-new
        out = self._run_prdiff()
        self.assertEqualMultiline(out, exp)

-
    @GET_PROJECT_PACKAGES(UPSTREAM, BRANCH)
    @POST_RDIFF(UPSTREAM, BRANCH)
    @POST(rdiff_url('only-in-new', UPSTREAM, BRANCH), exp='', text='')

@ -104,7 +106,6 @@ identical: only-in-new
        out = self._run_prdiff('home:user:branches:some:project')
        self.assertEqualMultiline(out, exp)

-
    @GET_PROJECT_PACKAGES('old:prj', 'new:prj')
    @POST_RDIFF('old:prj', 'new:prj')
    def testPrdiffTwoArgs(self):

@ -116,7 +117,6 @@ identical: common-three
        out = self._run_prdiff('old:prj', 'new:prj')
        self.assertEqualMultiline(out, exp)

-
    @GET_PROJECT_PACKAGES('old:prj', 'new:prj')
    @POST_RDIFF('old:prj', 'new:prj')
    def testPrdiffOldOnly(self):

@ -129,7 +129,6 @@ old only: only-in-old
        out = self._run_prdiff('--show-not-in-new', 'old:prj', 'new:prj')
        self.assertEqualMultiline(out, exp)

-
    @GET_PROJECT_PACKAGES('old:prj', 'new:prj')
    @POST_RDIFF('old:prj', 'new:prj')
    def testPrdiffNewOnly(self):

@ -142,7 +141,6 @@ new only: only-in-new
        out = self._run_prdiff('--show-not-in-old', 'old:prj', 'new:prj')
        self.assertEqualMultiline(out, exp)

-
    @GET_PROJECT_PACKAGES('old:prj', 'new:prj')
    @POST_RDIFF('old:prj', 'new:prj')
    def testPrdiffDiffstat(self):

@ -158,7 +156,6 @@ identical: common-three
        out = self._run_prdiff('--diffstat', 'old:prj', 'new:prj')
        self.assertEqualMultiline(out, exp)

-
    @GET_PROJECT_PACKAGES('old:prj', 'new:prj')
    @POST_RDIFF('old:prj', 'new:prj')
    def testPrdiffUnified(self):

@ -182,9 +179,8 @@ identical: common-three
        out = self._run_prdiff('--unified', 'old:prj', 'new:prj')
        self.assertEqualMultiline(out, exp)

-
    @GET_PROJECT_PACKAGES('old:prj', 'new:prj')
    @POST(rdiff_url('common-two', 'old:prj', 'new:prj'), exp='', file='common-two-diff')
    @POST(rdiff_url('common-three', 'old:prj', 'new:prj'), exp='', text='')
    def testPrdiffInclude(self):
        self._change_to_tmpdir()

@ -195,9 +191,8 @@ identical: common-three
                               'old:prj', 'new:prj')
        self.assertEqualMultiline(out, exp)

-
    @GET_PROJECT_PACKAGES('old:prj', 'new:prj')
    @POST(rdiff_url('common-two', 'old:prj', 'new:prj'), exp='', file='common-two-diff')
    @POST(rdiff_url('common-three', 'old:prj', 'new:prj'), exp='', text='')
    def testPrdiffExclude(self):
        self._change_to_tmpdir()

@ -207,9 +202,8 @@ identical: common-three
        out = self._run_prdiff('--exclude', 'one', 'old:prj', 'new:prj')
        self.assertEqualMultiline(out, exp)

-
    @GET_PROJECT_PACKAGES('old:prj', 'new:prj')
    @POST(rdiff_url('common-two', 'old:prj', 'new:prj'), exp='', file='common-two-diff')
    def testPrdiffIncludeExclude(self):
        self._change_to_tmpdir()
        exp = """differs: common-two

@ -219,13 +213,12 @@ identical: common-three
                               'old:prj', 'new:prj')
        self.assertEqualMultiline(out, exp)

-
    @GET_PROJECT_PACKAGES(UPSTREAM, BRANCH)
    @GET(request_url(UPSTREAM), exp='', file='request')
    @POST(rdiff_url('common-one', UPSTREAM, BRANCH), exp='', text='')
    @POST(rdiff_url('common-two', UPSTREAM, BRANCH), exp='', file='common-two-diff')
    @POST(rdiff_url('common-three', UPSTREAM, BRANCH), exp='', file='common-two-diff')
    @POST(rdiff_url('only-in-new', UPSTREAM, BRANCH), exp='', text='')
    def testPrdiffRequestsMatching(self):
        self._change_to_tmpdir()
        exp = """identical: common-one

@ -241,14 +234,14 @@ identical: only-in-new
        out = self._run_prdiff('--requests', UPSTREAM, BRANCH)
        self.assertEqualMultiline(out, exp)

    # Reverse the direction of the diff.

    @GET_PROJECT_PACKAGES(BRANCH, UPSTREAM)
    @GET(request_url(BRANCH), exp='', file='no-requests')
    @POST(rdiff_url('common-one', BRANCH, UPSTREAM), exp='', text='')
    @POST(rdiff_url('common-two', BRANCH, UPSTREAM), exp='', file='common-two-diff')
    @POST(rdiff_url('common-three', BRANCH, UPSTREAM), exp='', file='common-two-diff')
    @POST(rdiff_url('only-in-new', BRANCH, UPSTREAM), exp='', text='')
    def testPrdiffRequestsSwitched(self):
        self._change_to_tmpdir()
        exp = """identical: common-one

@ -260,7 +253,5 @@ identical: only-in-new
        self.assertEqualMultiline(out, exp)

-

if __name__ == '__main__':
-    import unittest
    unittest.main()

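For reference, the rdiff_url() helper visible at the top of this file can be exercised on its own; the only OBS-specific detail is the hand-rolled percent-encoding of ':' in the old project name. A standalone copy of the helper shown above, under the assumption that http://localhost is the stubbed test server and not a real OBS instance:

UPSTREAM = 'some:project'
BRANCH = 'home:user:branches:' + UPSTREAM


def rdiff_url(pkg, oldprj, newprj):
    # Same URL the tests expect their mocked POST requests to hit.
    return 'http://localhost/source/%s/%s?unified=1&opackage=%s&oproject=%s&cmd=diff&expand=1&tarlimit=0&filelimit=0' % \
        (newprj, pkg, pkg, oldprj.replace(':', '%3A'))


print(rdiff_url('common-two', UPSTREAM, BRANCH))
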
@ -1,4 +1,5 @@
import os
+import unittest

import osc.core
import osc.oscerr

@ -8,10 +9,11 @@ from .common import OscTestCase

FIXTURES_DIR = os.path.join(os.path.dirname(__file__), 'project_package_status_fixtures')

+
def suite():
-    import unittest
    return unittest.defaultTestLoader.loadTestsFromTestCase(TestProjectStatus)

+
class TestProjectStatus(OscTestCase):
    def _get_fixtures_dir(self):
        return FIXTURES_DIR

@ -93,7 +95,7 @@ class TestProjectStatus(OscTestCase):
        self._change_to_pkg('.')
        prj = osc.core.Project('.', getPackageList=False)
        exp_st = [(' ', 'conflict'), (' ', 'simple'), ('A', 'added'), ('D', 'deleted'),
                  ('!', 'missing'), ('!', 'added_deleted'), ('D', 'deleted_deleted'), ('?', 'excluded')]
        st = prj.get_status()
        self.assertEqual(exp_st, st)

@ -159,6 +161,6 @@ class TestProjectStatus(OscTestCase):
        p = prj.get_pacobj('doesnotexist')
        self.assertTrue(isinstance(p, type(None)))

+
if __name__ == '__main__':
-    import unittest
    unittest.main()

@ -1,5 +1,7 @@
import os
+import shutil
import sys
+import unittest
from xml.etree import ElementTree as ET

import osc.core

@ -10,10 +12,11 @@ from .common import GET, PUT, POST, DELETE, OscTestCase

FIXTURES_DIR = os.path.join(os.path.dirname(__file__), 'repairwc_fixtures')

+
def suite():
-    import unittest
    return unittest.defaultTestLoader.loadTestsFromTestCase(TestRepairWC)

+
class TestRepairWC(OscTestCase):
    def _get_fixtures_dir(self):
        return FIXTURES_DIR

@ -214,7 +217,6 @@ class TestRepairWC(OscTestCase):

    def test_project_noapiurl(self):
        """the project wc has no _apiurl file"""
-        import shutil
        prj_dir = os.path.join(self.tmpdir, 'prj_noapiurl')
        shutil.copytree(os.path.join(self._get_fixtures_dir(), 'prj_noapiurl'), prj_dir)
        storedir = os.path.join(prj_dir, osc.core.store)

@ -227,5 +229,4 @@ class TestRepairWC(OscTestCase):


if __name__ == '__main__':
-    import unittest
    unittest.main()

@ -1,4 +1,5 @@
import os
+import unittest
from xml.etree import ElementTree as ET

import osc.core

@ -9,22 +10,23 @@ from .common import OscTestCase

FIXTURES_DIR = os.path.join(os.path.dirname(__file__), 'request_fixtures')

+
def suite():
-    import unittest
    return unittest.defaultTestLoader.loadTestsFromTestCase(TestRequest)

+
class TestRequest(OscTestCase):
    def _get_fixtures_dir(self):
        return FIXTURES_DIR

    def setUp(self):
-        OscTestCase.setUp(self, copytree=False)
+        super().setUp(copytree=False)

    def test_createsr(self):
        """create a simple submitrequest"""
        r = osc.core.Request()
        r.add_action('submit', src_project='foo', src_package='bar', src_rev='42',
                     tgt_project='foobar', tgt_package='bar')
        self.assertEqual(r.actions[0].type, 'submit')
        self.assertEqual(r.actions[0].src_project, 'foo')
        self.assertEqual(r.actions[0].src_package, 'bar')

@ -49,7 +51,7 @@ class TestRequest(OscTestCase):
        """create a simple submitrequest"""
        r = osc.core.Request()
        r.add_action('submit', src_project='foo', src_package='bar',
                     tgt_project='foobar', tgt_package='bar', opt_sourceupdate='cleanup', opt_updatelink='1')
        self.assertEqual(r.actions[0].type, 'submit')
        self.assertEqual(r.actions[0].src_project, 'foo')
        self.assertEqual(r.actions[0].src_package, 'bar')

@ -77,7 +79,7 @@ class TestRequest(OscTestCase):
        """create a submitrequest with missing target package"""
        r = osc.core.Request()
        r.add_action('submit', src_project='foo', src_package='bar',
                     tgt_project='foobar')
        self.assertEqual(r.actions[0].type, 'submit')
        self.assertEqual(r.actions[0].src_project, 'foo')
        self.assertEqual(r.actions[0].src_package, 'bar')

@ -148,7 +150,7 @@ class TestRequest(OscTestCase):
        """create an add_role request (person+group element)"""
        r = osc.core.Request()
        r.add_action('add_role', tgt_project='foo', tgt_package='bar', person_name='user', person_role='reader',
                     group_name='group', group_role='reviewer')
        self.assertEqual(r.actions[0].type, 'add_role')
        self.assertEqual(r.actions[0].tgt_project, 'foo')
        self.assertEqual(r.actions[0].tgt_package, 'bar')

@ -565,6 +567,6 @@ Comment: <no comment>"""
        self.assertTrue(len(r.get_actions('submit', 'delete', 'change_devel')) == 5)
        self.assertTrue(len(r.get_actions()) == 8)

+
if __name__ == '__main__':
-    import unittest
    unittest.main()

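The request tests above build submit requests purely in memory; no server is contacted. A trimmed-down version of what test_createsr does, assuming only that osc is importable:

import osc.core

r = osc.core.Request()
r.add_action('submit', src_project='foo', src_package='bar', src_rev='42',
             tgt_project='foobar', tgt_package='bar')

print(r.actions[0].type)         # 'submit'
print(r.actions[0].src_project)  # 'foo'
print(r.actions[0].tgt_package)  # 'bar'
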
@ -1,5 +1,6 @@
import os
import sys
+import unittest

import osc.commandline

@ -7,12 +8,12 @@ from .common import GET, OscTestCase


def suite():
-    import unittest
    return unittest.defaultTestLoader.loadTestsFromTestCase(TestResults)

+
class TestResults(OscTestCase):
    def setUp(self):
-        OscTestCase.setUp(self, copytree=False)
+        super().setUp(copytree=False)

    def _get_fixtures_name(self):
        return 'results_fixtures'

@ -31,13 +32,13 @@ class TestResults(OscTestCase):
    @GET('http://localhost/build/testproject/_result', file='result.xml')
    def testPrjresults(self):
        out = self._run_osc('prjresults', '--xml', 'testproject')
-        self.assertEqualMultiline(out, self._get_fixture('result.xml')+'\n')
+        self.assertEqualMultiline(out, self._get_fixture('result.xml') + '\n')

    @GET('http://localhost/build/testproject/_result', file='result-dirty.xml')
    @GET('http://localhost/build/testproject/_result?oldstate=c57e2ee592dbbf26ebf19cc4f1bc1e83', file='result.xml')
    def testPrjresultsWatch(self):
        out = self._run_osc('prjresults', '--watch', '--xml', 'testproject')
-        self.assertEqualMultiline(out, self._get_fixture('result-dirty.xml')+'\n'+self._get_fixture('result.xml')+'\n')
+        self.assertEqualMultiline(out, self._get_fixture('result-dirty.xml') + '\n' + self._get_fixture('result.xml') + '\n')

    @GET('http://localhost/build/testproject/_result?package=python-MarkupSafe&multibuild=1&locallink=1', file='result.xml')
    def testResults(self):

@ -48,9 +49,8 @@ class TestResults(OscTestCase):
    @GET('http://localhost/build/testproject/_result?package=python-MarkupSafe&oldstate=c57e2ee592dbbf26ebf19cc4f1bc1e83&multibuild=1&locallink=1', file='result.xml')
    def testResultsWatch(self):
        out = self._run_osc('results', '--watch', '--xml', 'testproject', 'python-MarkupSafe')
-        self.assertEqualMultiline(out, self._get_fixture('result-dirty.xml')+self._get_fixture('result.xml'))
+        self.assertEqualMultiline(out, self._get_fixture('result-dirty.xml') + self._get_fixture('result.xml'))


if __name__ == '__main__':
-    import unittest
    unittest.main()

@ -1,4 +1,5 @@
import os
+import unittest

import osc.core
import osc.oscerr

@ -8,10 +9,11 @@ from .common import OscTestCase

FIXTURES_DIR = os.path.join(os.path.dirname(__file__), 'revertfile_fixtures')

+
def suite():
-    import unittest
    return unittest.defaultTestLoader.loadTestsFromTestCase(TestRevertFiles)

+
class TestRevertFiles(OscTestCase):
    def _get_fixtures_dir(self):
        return FIXTURES_DIR

@ -95,6 +97,6 @@ class TestRevertFiles(OscTestCase):
        self.assertTrue(os.path.exists(storefile))
        self.assertFilesEqual(fname, storefile)

+
if __name__ == '__main__':
-    import unittest
    unittest.main()

@ -1,4 +1,5 @@
import os
+import unittest

import osc.core
import osc.oscerr

@ -8,13 +9,14 @@ from .common import GET, PUT, OscTestCase

FIXTURES_DIR = os.path.join(os.path.dirname(__file__), 'setlinkrev_fixtures')

+
def suite():
-    import unittest
    return unittest.defaultTestLoader.loadTestsFromTestCase(TestSetLinkRev)

+
class TestSetLinkRev(OscTestCase):
    def setUp(self):
-        OscTestCase.setUp(self, copytree=False)
+        super().setUp(copytree=False)

    def _get_fixtures_dir(self):
        return FIXTURES_DIR

@ -89,6 +91,6 @@ class TestSetLinkRev(OscTestCase):
        """delete non existent rev attribute from link xml"""
        osc.core.set_link_rev('http://localhost', 'osctest', 'simple', revision=None)

+
if __name__ == '__main__':
-    import unittest
    unittest.main()

@ -1,5 +1,6 @@
import os
import sys
+import unittest

import osc.core
import osc.oscerr

@ -9,10 +10,11 @@ from .common import GET, OscTestCase

FIXTURES_DIR = os.path.join(os.path.dirname(__file__), 'update_fixtures')

+
def suite():
-    import unittest
    return unittest.defaultTestLoader.loadTestsFromTestCase(TestUpdate)

+
class TestUpdate(OscTestCase):
    def _get_fixtures_dir(self):
        return FIXTURES_DIR

@ -62,7 +64,7 @@ class TestUpdate(OscTestCase):
    @GET('http://localhost/source/osctest/simple/_meta', file='meta.xml')
    def testUpdateUpstreamModifiedFile(self):
        """a file was modified in the remote package (local file isn't modified)"""

        self._change_to_pkg('simple')
        osc.core.Package('.').update(rev=2)
        exp = 'U foo\nAt revision 2.\n'

@ -285,6 +287,6 @@ class TestUpdate(OscTestCase):
        self.assertFalse(os.path.exists(os.path.join('.osc', 'added')))
        self._check_digests('testUpdateResumeDeletedFile_files')

+
if __name__ == '__main__':
-    import unittest
    unittest.main()
