mirror of https://github.com/openSUSE/osc.git synced 2025-01-13 17:16:23 +01:00

Modernize code with pyupgrade

pyupgrade --keep-percent-format --py36-plus `find -name '*.py'`
Daniel Mach 2022-07-28 19:11:29 +02:00
parent e7af9cebb3
commit feb53212dd
38 changed files with 126 additions and 138 deletions
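For illustration only (this sketch is not part of the commit): the kinds of rewrites `pyupgrade --py36-plus` applies to a hypothetical module, matching the hunks below. The class and names here are invented, and `--keep-percent-format` leaves `%`-style formatting alone, which is why the `%` strings in the hunks stay unchanged.

# Hypothetical before/after sketch of pyupgrade --py36-plus rewrites (names are invented).
#
#   class Config(object):              ->  class Config:
#   super(Config, self).__init__()     ->  super().__init__()
#   "{}/{}".format(a, b)               ->  f"{a}/{b}"
#   dict((k, v) for k, v in items)     ->  {k: v for k, v in items}
#   sorted(set([r.name for r in rs]))  ->  sorted({r.name for r in rs})
#   open(path, "r")                    ->  open(path)
#   except IOError:                    ->  except OSError:
#   re.match('(\d+)', s)               ->  re.match(r'(\d+)', s)

class Config:
    def __init__(self, path):
        super().__init__()
        with open(path) as f:                              # mode "r" is the default
            self.names = {line.strip() for line in f if line.strip()}

    def __str__(self):
        return f"Config with {len(self.names)} entries"    # .format() becomes an f-string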


@@ -135,7 +135,7 @@ def run(prg, argv=None):
if 'tlsv1' in str(e):
print('The python on this system or the server does not support TLSv1.2', file=sys.stderr)
print("SSL Error:", e, file=sys.stderr)
-except IOError as e:
+except OSError as e:
# ignore broken pipe
if e.errno != errno.EPIPE:
raise


@@ -499,8 +499,8 @@ def get_prefer_pkgs(dirs, wanted_arch, type, cpio):
packageQuery = packagequery.PackageQuery.query(path)
packageQueries.add(packageQuery)
-prefer_pkgs = dict((decode_it(name), packageQuery.path())
-for name, packageQuery in packageQueries.items())
+prefer_pkgs = {decode_it(name): packageQuery.path()
+for name, packageQuery in packageQueries.items()}
depfile = create_deps(packageQueries.values())
cpio.add(b'deps', b'\n'.join(depfile))


@@ -50,7 +50,7 @@ class Checker:
if file in self.imported:
return
-fd = open(file, "r")
+fd = open(file)
line = fd.readline()
if line and line[0:14] == "-----BEGIN PGP":
line = fd.readline()


@@ -3,7 +3,6 @@
# Author: Trent Mick (TrentM@ActiveState.com)
# Home: http://trentm.com/projects/cmdln/
-from __future__ import print_function
"""An improvement on Python's standard cmd.py module.
@@ -1289,7 +1288,7 @@ def _format_linedata(linedata, indent, indent_width):
SPACING = 3
MAX_NAME_WIDTH = 15
-NAME_WIDTH = min(max([len(s) for s, d in linedata]), MAX_NAME_WIDTH)
+NAME_WIDTH = min(max(len(s) for s, d in linedata), MAX_NAME_WIDTH)
DOC_WIDTH = WIDTH - NAME_WIDTH - SPACING
for namestr, doc in linedata:
line = indent + namestr


@@ -4533,9 +4533,9 @@ Please submit there instead, or use --nodevelproject to force direct submission.
obs_url = node.text
if package is None:
-url = "{}/project/show/{}".format(obs_url, project)
+url = f"{obs_url}/project/show/{project}"
else:
-url = "{}/package/show/{}/{}".format(obs_url, project, package)
+url = f"{obs_url}/package/show/{project}/{package}"
run_external('xdg-open', url)
@@ -4986,7 +4986,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
prj = Project(arg)
if prj.scm_url:
-print("WARNING: Skipping project '{}' because it is managed in scm (git): {}".format(prj.name, prj.scm_url))
+print(f"WARNING: Skipping project '{prj.name}' because it is managed in scm (git): {prj.scm_url}")
args.remove(arg)
continue
@@ -5010,7 +5010,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
for pac in pacs.copy():
if pac.scm_url:
-print("WARNING: Skipping package '{}' because it is managed in scm (git): {}".format(pac.name, pac.scm_url))
+print(f"WARNING: Skipping package '{pac.name}' because it is managed in scm (git): {pac.scm_url}")
pacs.remove(pac)
continue
@@ -6251,10 +6251,10 @@ Please submit there instead, or use --nodevelproject to force direct submission.
constraintsfile_data = None
if constraintsfile is not None:
-constraintsfile_data = open(constraintsfile, 'r').read()
+constraintsfile_data = open(constraintsfile).read()
elif not opts.ignore_file:
if os.path.isfile("_constraints"):
-constraintsfile_data = open("_constraints", 'r').read()
+constraintsfile_data = open("_constraints").read()
else:
print("No local _constraints file. Using just the project constraints")
@@ -6390,7 +6390,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
Repo.tofile(repolistfile, repositories)
no_repo = False
-repo_names = sorted(set([r.name for r in repositories]))
+repo_names = sorted({r.name for r in repositories})
if not arg_repository and repositories:
# XXX: we should avoid hardcoding repository names
# Use a default value from config, but just even if it's available
@@ -7919,8 +7919,8 @@ Please submit there instead, or use --nodevelproject to force direct submission.
# XXX: is it a good idea to make this the default?
# support perl symbols:
-if re.match('^perl\(\w+(::\w+)*\)$', search_term):
-search_term = re.sub('\)', '', re.sub('(::|\()', '-', search_term))
+if re.match(r'^perl\(\w+(::\w+)*\)$', search_term):
+search_term = re.sub(r'\)', '', re.sub(r'(::|\()', '-', search_term))
opts.package = True
if opts.mine:
@@ -7941,7 +7941,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
if opts.binary and (opts.title or opts.description or opts.involved or opts.bugowner or opts.maintainer
or opts.project or opts.package):
raise oscerr.WrongOptions('Sorry, \'--binary\' and \'--title\' or \'--description\' or \'--involved ' \
-'or \'--bugowner\' or \'--maintainer\' or \'--limit-to-attribute <attr>\ ' \
+'or \'--bugowner\' or \'--maintainer\' or \'--limit-to-attribute <attr>\\ ' \
'or \'--project\' or \'--package\' are mutually exclusive')
apiurl = self.get_api_url()
@@ -8009,7 +8009,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
# backward compatibility: local role filtering
if opts.limit_to_attribute:
role_filter_xpath = xpath_join(role_filter_xpath, 'attribute/@name=\'%s\'' % opts.limit_to_attribute, op='and')
-what = dict([[kind, role_filter_xpath] for kind in what.keys()])
+what = {kind: role_filter_xpath for kind in what.keys()}
res = search(apiurl, **what)
filter_role(res, search_term, role_filter)
if role_filter:
@@ -9189,7 +9189,7 @@ Please submit there instead, or use --nodevelproject to force direct submission.
fp = open(fn_changelog)
titleline = fp.readline()
fp.close()
-if re.match('^\*\W+(.+\W+\d{1,2}\W+20\d{2})\W+(.+)\W+<(.+)>\W+(.+)$', titleline):
+if re.match(r'^\*\W+(.+\W+\d{1,2}\W+20\d{2})\W+(.+)\W+<(.+)>\W+(.+)$', titleline):
meego_style = True
except IndexError:
pass


@@ -469,7 +469,7 @@ class SignatureAuthHandler(AuthHandlerBase):
def is_ssh_private_keyfile(self, keyfile_path):
if not os.path.isfile(keyfile_path):
return False
-with open(keyfile_path, "r") as f:
+with open(keyfile_path) as f:
try:
line = f.readline(100).strip()
except UnicodeDecodeError:
@@ -490,7 +490,7 @@ class SignatureAuthHandler(AuthHandlerBase):
def get_fingerprint(line):
parts = line.strip().split(b" ")
if len(parts) < 2:
-raise ValueError("Unable to retrieve ssh key fingerprint from line: {}".format(line))
+raise ValueError(f"Unable to retrieve ssh key fingerprint from line: {line}")
return parts[1]
def list_ssh_dir_keys(self):


@@ -1261,7 +1261,7 @@ class Package:
state = '?'
try:
state = self.status(n)
-except IOError as ioe:
+except OSError as ioe:
if not force:
raise ioe
if state in ['?', 'A', 'M', 'R', 'C'] and not force:
@@ -1450,7 +1450,7 @@ class Package:
if self.is_link_to_different_project():
if can_branch:
orgprj = self.get_local_origin_project()
-print("Branching {} from {} to {}".format(self.name, orgprj, self.prjname))
+print(f"Branching {self.name} from {orgprj} to {self.prjname}")
exists, targetprj, targetpkg, srcprj, srcpkg = branch_pkg(
self.apiurl, orgprj, self.name, target_project=self.prjname)
# update _meta and _files to sychronize the local package
@@ -1458,7 +1458,7 @@ class Package:
self.update_local_pacmeta()
self.update_local_filesmeta()
else:
-print("{} Not commited because is link to a different project".format(self.name))
+print(f"{self.name} Not commited because is link to a different project")
return 1
if not self.todo:
@@ -1525,7 +1525,7 @@ class Package:
sfilelist = self.__send_commitlog(msg, filelist, validate=True)
hash_entries = [e for e in sfilelist.findall('entry') if e.get('hash') is not None]
if sfilelist.get('error') and hash_entries:
-name2elem = dict([(e.get('name'), e) for e in filelist.findall('entry')])
+name2elem = {e.get('name'): e for e in filelist.findall('entry')}
for entry in hash_entries:
filename = entry.get('name')
fileelem = name2elem.get(filename)
@@ -1846,7 +1846,7 @@ class Package:
def get_pulled_srcmd5(self):
pulledrev = None
-for line in open(os.path.join(self.storedir, '_pulled'), 'r'):
+for line in open(os.path.join(self.storedir, '_pulled')):
pulledrev = line.strip()
return pulledrev
@@ -2764,7 +2764,7 @@ class Action:
not action_node.get('type') in Action.type_args.keys() or \
not action_node.tag in ('action', 'submit'):
raise oscerr.WrongArgs('invalid argument')
-elm_to_prefix = dict([(i[1], i[0]) for i in Action.prefix_to_elm.items()])
+elm_to_prefix = {i[1]: i[0] for i in Action.prefix_to_elm.items()}
kwargs = {}
for node in action_node:
prefix = elm_to_prefix.get(node.tag, node.tag)
@@ -3277,7 +3277,7 @@ def store_readlist(dir, name):
r = []
if os.path.exists(os.path.join(dir, store, name)):
-r = [line.rstrip('\n') for line in open(os.path.join(dir, store, name), 'r')]
+r = [line.rstrip('\n') for line in open(os.path.join(dir, store, name))]
return r
def read_tobeadded(dir):
@@ -3438,7 +3438,7 @@ def meta_get_project_list(apiurl, deleted=None):
u = makeurl(apiurl, ['source'], query)
f = http_GET(u)
root = ET.parse(f).getroot()
-return sorted([ node.get('name') for node in root if node.get('name')])
+return sorted( node.get('name') for node in root if node.get('name'))
def show_project_meta(apiurl, prj, rev=None, blame=None):
@@ -3607,7 +3607,7 @@ def show_pattern_metalist(apiurl, prj):
except HTTPError as e:
e.osc_msg = 'show_pattern_metalist: Error getting pattern list for project \'%s\'' % prj
raise
-r = sorted([ node.get('name') for node in tree.getroot() ])
+r = sorted( node.get('name') for node in tree.getroot() )
return r
@@ -3807,7 +3807,6 @@ def parse_meta_to_string(data):
"""
# data can be a bytes object, a list with strings, a list with bytes, just a string.
# So we need the following even if it is ugly.
-if sys.version_info >= (3, 0):
if isinstance(data, bytes):
data = decode_it(data)
elif isinstance(data, list):
@@ -4000,13 +3999,13 @@ def read_meta_from_spec(specfile, *args):
else:
tags.append(itm)
-tag_pat = '(?P<tag>^%s)\s*:\s*(?P<val>.*)'
+tag_pat = r'(?P<tag>^%s)\s*:\s*(?P<val>.*)'
for tag in tags:
m = re.compile(tag_pat % tag, re.I | re.M).search(''.join(lines))
if m and m.group('val'):
spec_data[tag] = m.group('val').strip()
-section_pat = '^%s\s*?$'
+section_pat = r'^%s\s*?$'
for section in sections:
m = re.compile(section_pat % section, re.I | re.M).search(''.join(lines))
if m:
@@ -4789,11 +4788,6 @@ def get_source_file_diff(dir, filename, rev, oldfilename = None, olddir = None,
from_file = b'%s\t(revision %s)' % (origfilename.encode(), str(rev).encode())
to_file = b'%s\t(working copy)' % origfilename.encode()
-if sys.version_info < (3, 0):
-d = difflib.unified_diff(s1, s2,
-fromfile = from_file, \
-tofile = to_file)
-else:
d = difflib.diff_bytes(difflib.unified_diff, s1, s2, \
fromfile = from_file, \
tofile = to_file)
@@ -5605,7 +5599,7 @@ def get_platforms(apiurl):
def get_repositories(apiurl):
f = http_GET(makeurl(apiurl, ['platform']))
tree = ET.parse(f)
-r = sorted([ node.get('name') for node in tree.getroot() ])
+r = sorted( node.get('name') for node in tree.getroot() )
return r
@@ -5694,7 +5688,7 @@ class Repo:
if not os.path.exists(filename):
return []
repos = []
-lines = open(filename, 'r').readlines()
+lines = open(filename).readlines()
for line in lines:
data = line.split()
if len(data) == 2:
@@ -6191,10 +6185,10 @@ def streamfile(url, http_meth = http_GET, bufsize=8192, data=None, progress_obj=
def buildlog_strip_time(data):
"""Strips the leading build time from the log"""
if isinstance(data, str):
-time_regex = re.compile('^\[[^\]]*\] ', re.M)
+time_regex = re.compile(r'^\[[^\]]*\] ', re.M)
return time_regex.sub('', data)
else:
-time_regex = re.compile(b'^\[[^\]]*\] ', re.M)
+time_regex = re.compile(br'^\[[^\]]*\] ', re.M)
return time_regex.sub(b'', data)
@@ -6210,10 +6204,7 @@ def print_buildlog(apiurl, prj, package, repository, arch, offset=0, strip_time=
# to protect us against control characters
import string
-if sys.version_info >= (3, 0):
all_bytes = bytes.maketrans(b'', b'')
-else:
-all_bytes = string.maketrans(b'', b'')
remove_bytes = all_bytes[:8] + all_bytes[14:32] # accept tabs and newlines
query = {'nostream' : '1', 'start' : '%s' % offset}
@@ -6463,7 +6454,7 @@ def get_commitlog(apiurl, prj, package, revision, format = 'text', meta = False,
r.append('</logentry>')
else:
if requestid:
-requestid = decode_it((b"rq" + requestid))
+requestid = decode_it(b"rq" + requestid)
s = '-' * 76 + \
'\nr%s | %s | %s | %s | %s | %s\n' % (rev, user, t, srcmd5, version, requestid) + \
'\n' + decode_it(comment)
@@ -6544,7 +6535,7 @@ def store_read_project(dir):
try:
p = open(os.path.join(dir, store, '_project')).readlines()[0].strip()
-except IOError:
+except OSError:
msg = 'Error: \'%s\' is not an osc project dir or working copy' % os.path.abspath(dir)
if os.path.exists(os.path.join(dir, '.svn')):
msg += '\nTry svn instead of osc.'
@@ -6557,7 +6548,7 @@ def store_read_package(dir):
try:
p = open(os.path.join(dir, store, '_package')).readlines()[0].strip()
-except IOError:
+except OSError:
msg = 'Error: \'%s\' is not an osc package working copy' % os.path.abspath(dir)
if os.path.exists(os.path.join(dir, '.svn')):
msg += '\nTry svn instead of osc.'
@@ -6572,7 +6563,7 @@ def store_read_scmurl(dir):
return
try:
p = open(url_file).readlines()[0].strip()
-except IOError:
+except OSError:
msg = 'Error: \'%s\' is not an osc package working copy' % os.path.abspath(dir)
if os.path.exists(os.path.join(dir, '.svn')):
msg += '\nTry svn instead of osc.'
@@ -6988,7 +6979,7 @@ def unpack_srcrpm(srpm, dir, *files):
if os.path.isdir(dir):
os.chdir(dir)
ret = -1
-with open(srpm, 'r') as fsrpm:
+with open(srpm) as fsrpm:
with open(os.devnull, 'w') as devnull:
rpm2cpio_proc = subprocess.Popen(['rpm2cpio'], stdin=fsrpm,
stdout=subprocess.PIPE)
@@ -7387,7 +7378,7 @@ def get_commit_message_template(pac):
diff += get_source_file_diff(pac.absdir, filename, pac.rev)
elif pac.status(filename) == 'A':
with open(os.path.join(pac.absdir, filename), 'rb') as f:
-diff.extend((b'+' + line for line in f))
+diff.extend(b'+' + line for line in f)
if diff:
template = parse_diff_for_commit_message(''.join(decode_list(diff)))
@@ -7451,7 +7442,7 @@ def print_request_list(apiurl, project, package = None, states = ('new', 'review
requests = get_request_list(apiurl, project, package, req_state=states)
msg = '\nPending requests for %s: %s (%s)'
if sys.stdout.isatty():
-msg = '\033[1m{}\033[0m'.format(msg)
+msg = f'\033[1m{msg}\033[0m'
if package is None and len(requests):
print(msg % ('project', project, len(requests)))
elif len(requests):
@@ -7632,7 +7623,7 @@ def request_interactive_review(apiurl, request, initial_cmd='', group=None,
prompt = 'd(i)ff/(a)ccept/(b)uildstatus/(e)dit/(s)kip/(c)ancel > '
else:
state_map = {'a': 'accepted', 'd': 'declined', 'r': 'revoked'}
-mo = re.search('^([adrl])(?:\s+(-f)?\s*-m\s+(.*))?$', repl)
+mo = re.search(r'^([adrl])(?:\s+(-f)?\s*-m\s+(.*))?$', repl)
if mo is None or orequest and mo.group(1) != 'a':
print('invalid choice: \'%s\'' % repl, file=sys.stderr)
continue
@@ -7808,7 +7799,7 @@ def get_user_projpkgs(apiurl, user, role=None, exclude_projects=[], proj=True, p
if e.code != 400 or not role_filter_xpath:
raise e
# backward compatibility: local role filtering
-what = dict([[kind, role_filter_xpath] for kind in what.keys()])
+what = {kind: role_filter_xpath for kind in what.keys()}
if 'package' in what:
what['package'] = xpath_join(role_filter_xpath, excl_pkg, op='and')
if 'project' in what:
@@ -8053,9 +8044,9 @@ class MultibuildFlavorResolver:
# use local _multibuild file
if self.use_local:
try:
-with open("_multibuild", "r") as f:
+with open("_multibuild") as f:
return f.read()
-except IOError as e:
+except OSError as e:
if e.errno != errno.EEXIST:
raise
return None


@@ -29,7 +29,7 @@ from . import conf
from . import oscerr
-class _LazyPassword(object):
+class _LazyPassword:
def __init__(self, pwfunc):
self._pwfunc = pwfunc
self._password = None
@@ -59,7 +59,7 @@ class _LazyPassword(object):
return getattr(str(self), name)
-class AbstractCredentialsManagerDescriptor(object):
+class AbstractCredentialsManagerDescriptor:
def name(self):
raise NotImplementedError()
@@ -78,11 +78,11 @@ class AbstractCredentialsManagerDescriptor(object):
return (-self.priority(), self.name()) < (-other.priority(), other.name())
-class AbstractCredentialsManager(object):
+class AbstractCredentialsManager:
config_entry = 'credentials_mgr_class'
def __init__(self, cp, options):
-super(AbstractCredentialsManager, self).__init__()
+super().__init__()
self._cp = cp
self._process_options(options)
@@ -229,7 +229,7 @@ class KeyringCredentialsManager(AbstractCredentialsManager):
try:
keyring_backend = keyring.core.load_keyring(self._backend_cls_name)
except ModuleNotFoundError:
-msg = "Invalid credentials_mgr_class: {}".format(self._backend_cls_name)
+msg = f"Invalid credentials_mgr_class: {self._backend_cls_name}"
raise oscerr.ConfigError(msg, conf.config['conffile'])
keyring.set_keyring(keyring_backend)
@@ -415,7 +415,7 @@ def create_credentials_manager(url, cp):
try:
creds_mgr = getattr(importlib.import_module(mod), cls).create(cp, options)
except ModuleNotFoundError:
-msg = "Invalid credentials_mgr_class: {}".format(creds_mgr_cls)
+msg = f"Invalid credentials_mgr_class: {creds_mgr_cls}"
raise oscerr.ConfigError(msg, conf.config['conffile'])
return creds_mgr


@@ -71,9 +71,9 @@ class Fetcher:
raise oscerr.APIError('CPIO archive is incomplete '
'(see .errors file)')
if package == '_repository':
-n = re.sub(b'\.pkg\.tar\.(zst|.z)$', b'.arch', hdr.filename)
+n = re.sub(br'\.pkg\.tar\.(zst|.z)$', b'.arch', hdr.filename)
if n.startswith(b'container:'):
-n = re.sub(b'\.tar\.(zst|.z)$', b'.tar', hdr.filename)
+n = re.sub(br'\.tar\.(zst|.z)$', b'.tar', hdr.filename)
pac = pkgs[decode_it(n.rsplit(b'.', 1)[0])]
pac.canonname = hdr.filename
else:
@@ -108,10 +108,10 @@ class Fetcher:
raise oscerr.APIError('unable to fetch cpio archive: '
'server always returns code 414')
n = int(len(pkgs) / 2)
-new_pkgs = dict([(k, pkgs[k]) for k in keys[:n]])
+new_pkgs = {k: pkgs[k] for k in keys[:n]}
self.__download_cpio_archive(apiurl, project, repo, arch,
package, **new_pkgs)
-new_pkgs = dict([(k, pkgs[k]) for k in keys[n:]])
+new_pkgs = {k: pkgs[k] for k in keys[n:]}
self.__download_cpio_archive(apiurl, project, repo, arch,
package, **new_pkgs)


@@ -14,7 +14,7 @@ from urllib.error import URLError
from .core import streamfile
-class OscFileGrabber(object):
+class OscFileGrabber:
def __init__(self, progress_obj=None):
self.progress_obj = progress_obj
@@ -28,7 +28,7 @@ class OscFileGrabber(object):
f.write(i)
-class OscMirrorGroup(object):
+class OscMirrorGroup:
def __init__(self, grabber, mirrors):
self._grabber = grabber
self._mirrors = mirrors


@@ -13,7 +13,7 @@ except ImportError:
have_pb_module = False
-class PBTextMeter(object):
+class PBTextMeter:
def start(self, basename, size=None):
if size is None:
@@ -40,7 +40,7 @@ class PBTextMeter(object):
self.bar.finish()
-class NoPBTextMeter(object):
+class NoPBTextMeter:
_complained = False
def start(self, basename, size=None):


@@ -23,7 +23,7 @@ class ConfigError(OscBaseError):
self.file = fname
def __str__(self):
-return "Error in config file {}\n {}".format(self.file, self.msg)
+return f"Error in config file {self.file}\n {self.msg}"
class ConfigMissingApiurl(ConfigError):
"""Exception raised when a apiurl does not exist in the config file"""
@@ -50,7 +50,7 @@ class NoConfigfile(OscBaseError):
self.msg = msg
def __str__(self):
-return "Config file cannot be found: {}\n {}".format(self.file, self.msg)
+return f"Config file cannot be found: {self.file}\n {self.msg}"
class ExtRuntimeError(OscBaseError):
"""Exception raised when there is a runtime error of an external tool"""


@@ -162,8 +162,8 @@ class ArchQuery(packagequery.PackageQuery, packagequery.PackageQueryResult):
if not (len(ver1) and len(ver2)):
break
# check if we have a digits segment
-mo1 = re.match(b'(\d+)', ver1)
-mo2 = re.match(b'(\d+)', ver2)
+mo1 = re.match(br'(\d+)', ver1)
+mo2 = re.match(br'(\d+)', ver2)
numeric = True
if mo1 is None:
mo1 = re.match(b'([a-zA-Z]+)', ver1)


@@ -100,8 +100,7 @@ class CpioRead:
self.__file.close()
def __iter__(self):
-for h in self.hdrs:
-yield h
+yield from self.hdrs
def _init_datastructs(self):
self.hdrs = []


@@ -89,9 +89,9 @@ class DebQuery(packagequery.PackageQuery, packagequery.PackageQueryResult):
def __parse_control(self, control, all_tags=False, self_provides=True, *extra_tags):
data = control.readline().strip()
while data:
-field, val = re.split(b':\s*', data.strip(), 1)
+field, val = re.split(br':\s*', data.strip(), 1)
data = control.readline()
-while data and re.match(b'\s+', data):
+while data and re.match(br'\s+', data):
val += b'\n' + data.strip()
data = control.readline().rstrip()
field = field.replace(b'-', b'_').lower()
@@ -122,7 +122,7 @@ class DebQuery(packagequery.PackageQuery, packagequery.PackageQueryResult):
# add self provides entry
self.fields[b'provides'].append(b'%s (= %s)' % (self.name(), b'-'.join(versrel)))
-def _split_field_value(self, field, delimeter=b',\s*'):
+def _split_field_value(self, field, delimeter=br',\s*'):
return [i.strip()
for i in re.split(delimeter, self.fields.get(field, b'')) if i]
@@ -203,8 +203,8 @@ class DebQuery(packagequery.PackageQuery, packagequery.PackageQueryResult):
"""
# 32 is arbitrary - it is needed for the "longer digit string wins" handling
# (found this nice approach in Build/Deb.pm (build package))
-ver1 = re.sub(b'(\d+)', lambda m: (32 * b'0' + m.group(1))[-32:], ver1)
-ver2 = re.sub(b'(\d+)', lambda m: (32 * b'0' + m.group(1))[-32:], ver2)
+ver1 = re.sub(br'(\d+)', lambda m: (32 * b'0' + m.group(1))[-32:], ver1)
+ver2 = re.sub(br'(\d+)', lambda m: (32 * b'0' + m.group(1))[-32:], ver2)
vers = itertools.zip_longest(ver1, ver2, fillvalue=b'')
for v1, v2 in vers:
if v1 == v2:


@@ -72,11 +72,11 @@ def get_version(version):
# removing "~" because it is not an allowed character in git tags
# and also because the normalized form is (for example) 1.0.0b0
if version and git_tag != version.replace("~", ""):
-msg = "Git tag '{}' doesn't correspond with version '{}' specified in the source code".format(git_tag, version)
+msg = f"Git tag '{git_tag}' doesn't correspond with version '{version}' specified in the source code"
raise ValueError(msg)
result = git_tag
if git_hash:
-result += "+{}.git.{}".format(git_commits, git_hash)
+result += f"+{git_commits}.git.{git_hash}"
return result


@@ -19,7 +19,7 @@ class PackageQueries(dict):
def __init__(self, wanted_architecture):
self.wanted_architecture = wanted_architecture
-super(PackageQueries, self).__init__()
+super().__init__()
def add(self, query):
"""Adds package query to dict if it is of the correct architecture and
@@ -43,7 +43,7 @@ class PackageQueries(dict):
# if current query does not exist or is older than this new query
if current_query is None or current_query.vercmp(query) <= 0:
-super(PackageQueries, self).__setitem__(name, query)
+super().__setitem__(name, query)
class PackageQuery:
"""abstract base class for all package types"""


@@ -41,7 +41,7 @@ def primaryPath(directory):
primaryPath = os.path.join(directory, locationElement.get("href"))
break
else:
-raise IOError("'%s' contains no primary location" % metaDataPath)
+raise OSError("'%s' contains no primary location" % metaDataPath)
return primaryPath


@@ -33,8 +33,7 @@ class RpmHeader:
return None
def __iter__(self):
-for i in self.entries:
-yield i
+yield from self.entries
def __len__(self):
return len(self.entries)
@@ -340,8 +339,8 @@ class RpmQuery(packagequery.PackageQuery, packagequery.PackageQueryResult):
break
# check if we have a digits segment
-mo1 = re.match('(\d+)', ver1)
-mo2 = re.match('(\d+)', ver2)
+mo1 = re.match(r'(\d+)', ver1)
+mo2 = re.match(r'(\d+)', ver2)
numeric = True
if mo1 is None:
mo1 = re.match('([a-zA-Z]+)', ver1)


@@ -1,6 +1,6 @@
# be careful when debugging this code:
# don't add print statements when setting sys.stdout = SafeWriter(sys.stdout)...
-class SafeWriter(object):
+class SafeWriter:
"""
Safely write an (unicode) str. In case of an "UnicodeEncodeError" the
the str is encoded with the "encoding" encoding.
@@ -20,4 +20,4 @@ class SafeWriter(object):
return getattr(self._writer, name)
def __setattr__(self, name, value):
-super(SafeWriter, self).__setattr__(name, value)
+super().__setattr__(name, value)


@@ -11,7 +11,7 @@ import osc.commandline
import osc.core
-class build_osc(build.build, object):
+class build_osc(build.build):
"""
Custom build command which generates man page.
"""
@@ -31,20 +31,20 @@ class build_osc(build.build, object):
outfile.close()
def run(self):
-super(build_osc, self).run()
+super().run()
self.build_man_page()
# take a potential build-base option into account (for instance, if osc is
# build and installed like this:
# python setup.py build --build-base=<dir> ... install ...)
-class install_data(install_data.install_data, object):
+class install_data(install_data.install_data):
def initialize_options(self):
-super(install_data, self).initialize_options()
+super().initialize_options()
self.built_data = None
def finalize_options(self):
-super(install_data, self).finalize_options()
+super().finalize_options()
self.set_undefined_options('build', ('build_base', 'built_data'))
data_files = []
for f in self.data_files:


@@ -228,7 +228,7 @@ class OscTestCase(unittest.TestCase):
def _check_list(self, fname, exp):
fname = os.path.join('.osc', fname)
self.assertTrue(os.path.exists(fname))
-self.assertEqual(open(fname, 'r').read(), exp)
+self.assertEqual(open(fname).read(), exp)
def _check_addlist(self, exp):
self._check_list('_to_be_added', exp)
@@ -244,9 +244,9 @@ class OscTestCase(unittest.TestCase):
def _check_digests(self, fname, *skipfiles):
fname = os.path.join(self._get_fixtures_dir(), fname)
-with open(os.path.join('.osc', '_files'), 'r') as f:
+with open(os.path.join('.osc', '_files')) as f:
files_act = f.read()
-with open(fname, 'r') as f:
+with open(fname) as f:
files_exp = f.read()
self.assertXMLEqual(files_act, files_exp)
root = ET.fromstring(files_act)


@@ -11,7 +11,7 @@ FIXTURES_DIR = os.path.join(os.path.dirname(__file__), 'addfile_fixtures')
def suite():
import unittest
-return unittest.makeSuite(TestAddFiles)
+return unittest.defaultTestLoader.loadTestsFromTestCase(TestAddFiles)
class TestAddFiles(OscTestCase):
def _get_fixtures_dir(self):
@@ -71,7 +71,7 @@ class TestAddFiles(OscTestCase):
exp = 'A foo\n'
self.assertEqual(sys.stdout.getvalue(), exp)
self.assertTrue(os.path.exists(os.path.join('.osc', 'foo')))
-self.assertNotEqual(open(os.path.join('.osc', 'foo'), 'r').read(), 'replaced file\n')
+self.assertNotEqual(open(os.path.join('.osc', 'foo')).read(), 'replaced file\n')
self.assertFalse(os.path.exists(os.path.join('.osc', '_to_be_deleted')))
self._check_status(p, 'foo', 'R')
self._check_addlist('foo\n')


@@ -13,7 +13,7 @@ FIXTURES_DIR = os.path.join(os.path.dirname(__file__), 'commit_fixtures')
def suite():
import unittest
-return unittest.makeSuite(TestCommit)
+return unittest.defaultTestLoader.loadTestsFromTestCase(TestCommit)
rev_dummy = '<revision rev="repository">\n <srcmd5>empty</srcmd5>\n</revision>'
@@ -40,7 +40,7 @@ class TestCommit(OscTestCase):
self.assertEqual(sys.stdout.getvalue(), exp)
self._check_digests('testSimple_cfilesremote')
self.assertTrue(os.path.exists('nochange'))
-self.assertEqual(open('nochange', 'r').read(), open(os.path.join('.osc', 'nochange'), 'r').read())
+self.assertEqual(open('nochange').read(), open(os.path.join('.osc', 'nochange')).read())
self._check_status(p, 'nochange', ' ')
self._check_status(p, 'foo', ' ')
self._check_status(p, 'merge', ' ')
@@ -64,7 +64,7 @@ class TestCommit(OscTestCase):
self.assertEqual(sys.stdout.getvalue(), exp)
self._check_digests('testAddfile_cfilesremote')
self.assertTrue(os.path.exists('add'))
-self.assertEqual(open('add', 'r').read(), open(os.path.join('.osc', 'add'), 'r').read())
+self.assertEqual(open('add').read(), open(os.path.join('.osc', 'add')).read())
self.assertFalse(os.path.exists(os.path.join('.osc', '_to_be_added')))
self._check_status(p, 'add', ' ')
self._check_status(p, 'foo', ' ')
@@ -241,7 +241,7 @@ class TestCommit(OscTestCase):
self.assertEqual(sys.stdout.getvalue(), exp)
self._check_digests('testAddfile_cfilesremote')
self.assertTrue(os.path.exists('add'))
-self.assertEqual(open('add', 'r').read(), open(os.path.join('.osc', 'add'), 'r').read())
+self.assertEqual(open('add').read(), open(os.path.join('.osc', 'add')).read())
self.assertFalse(os.path.exists(os.path.join('.osc', '_to_be_added')))
self._check_status(p, 'add', ' ')
self._check_status(p, 'foo', ' ')
@@ -341,7 +341,7 @@ class TestCommit(OscTestCase):
self.assertEqual(sys.stdout.getvalue(), exp)
self._check_digests('testSimple_cfilesremote')
self.assertTrue(os.path.exists('nochange'))
-self.assertEqual(open('nochange', 'r').read(), open(os.path.join('.osc', 'nochange'), 'r').read())
+self.assertEqual(open('nochange').read(), open(os.path.join('.osc', 'nochange')).read())
self._check_status(p, 'nochange', ' ')
self._check_status(p, 'foo', ' ')
self._check_status(p, 'merge', ' ')


@@ -10,7 +10,7 @@ FIXTURES_DIR = os.path.join(os.path.dirname(__file__), 'deletefile_fixtures')
def suite():
import unittest
-return unittest.makeSuite(TestDeleteFiles)
+return unittest.defaultTestLoader.loadTestsFromTestCase(TestDeleteFiles)
class TestDeleteFiles(OscTestCase):
def _get_fixtures_dir(self):


@@ -12,7 +12,7 @@ FIXTURES_DIR = os.path.join(os.path.dirname(__file__), 'difffile_fixtures')
def suite():
import unittest
-return unittest.makeSuite(TestDiffFiles)
+return unittest.defaultTestLoader.loadTestsFromTestCase(TestDiffFiles)
class TestDiffFiles(OscTestCase):
diff_hdr = 'Index: %s\n==================================================================='
@@ -325,9 +325,9 @@ Binary file 'binary' has changed.
def __canonise_diff(diff):
# we cannot use re.M because python 2.6's re.sub does
# not support a flags argument
-diff = [re.sub('^@@ -(\d+) ', '@@ -\\1,\\1 ', line)
+diff = [re.sub(r'^@@ -(\d+) ', '@@ -\\1,\\1 ', line)
for line in diff.split('\n')]
-diff = [re.sub('^(@@ -\d+,\d+) \+(\d+) ', '\\1 +\\2,\\2 ', line)
+diff = [re.sub(r'^(@@ -\d+,\d+) \+(\d+) ', '\\1 +\\2,\\2 ', line)
for line in diff]
return '\n'.join(diff)


@@ -4,7 +4,7 @@ from osc.util.helper import decode_it, decode_list
def suite():
-return unittest.makeSuite(TestResults)
+return unittest.defaultTestLoader.loadTestsFromTestCase(TestResults)
class TestResults(unittest.TestCase):
def testDecodeList(self):


@@ -10,7 +10,7 @@ FIXTURES_DIR = os.path.join(os.path.dirname(__file__), 'init_package_fixtures')
def suite():
import unittest
-return unittest.makeSuite(TestInitPackage)
+return unittest.defaultTestLoader.loadTestsFromTestCase(TestInitPackage)
class TestInitPackage(OscTestCase):
def _get_fixtures_dir(self):


@@ -10,7 +10,7 @@ FIXTURES_DIR = os.path.join(os.path.dirname(__file__), 'init_project_fixtures')
def suite():
import unittest
-return unittest.makeSuite(TestInitProject)
+return unittest.defaultTestLoader.loadTestsFromTestCase(TestInitProject)
class TestInitProject(OscTestCase):
def _get_fixtures_dir(self):


@@ -10,7 +10,7 @@ FIXTURES_DIR = os.path.join(os.path.dirname(__file__), 'project_package_status_f
def suite():
import unittest
-return unittest.makeSuite(TestPackageStatus)
+return unittest.defaultTestLoader.loadTestsFromTestCase(TestPackageStatus)
class TestPackageStatus(OscTestCase):
def _get_fixtures_dir(self):


@@ -43,7 +43,7 @@ def POST_RDIFF(oldprj, newprj):
def suite():
import unittest
-return unittest.makeSuite(TestProjectDiff)
+return unittest.defaultTestLoader.loadTestsFromTestCase(TestProjectDiff)
class TestProjectDiff(OscTestCase):
diff_hdr = 'Index: %s\n==================================================================='


@@ -10,7 +10,7 @@ FIXTURES_DIR = os.path.join(os.path.dirname(__file__), 'project_package_status_f
def suite():
import unittest
-return unittest.makeSuite(TestProjectStatus)
+return unittest.defaultTestLoader.loadTestsFromTestCase(TestProjectStatus)
class TestProjectStatus(OscTestCase):
def _get_fixtures_dir(self):


@@ -12,7 +12,7 @@ FIXTURES_DIR = os.path.join(os.path.dirname(__file__), 'repairwc_fixtures')
def suite():
import unittest
-return unittest.makeSuite(TestRepairWC)
+return unittest.defaultTestLoader.loadTestsFromTestCase(TestRepairWC)
class TestRepairWC(OscTestCase):
def _get_fixtures_dir(self):
@@ -223,7 +223,7 @@ class TestRepairWC(OscTestCase):
prj.wc_repair('http://localhost')
self.assertTrue(os.path.exists(os.path.join(storedir, '_apiurl')))
self.assertTrue(os.path.exists(os.path.join(storedir, '_apiurl')))
-self.assertEqual(open(os.path.join(storedir, '_apiurl'), 'r').read(), 'http://localhost\n')
+self.assertEqual(open(os.path.join(storedir, '_apiurl')).read(), 'http://localhost\n')
if __name__ == '__main__':


@@ -11,7 +11,7 @@ FIXTURES_DIR = os.path.join(os.path.dirname(__file__), 'request_fixtures')
def suite():
import unittest
-return unittest.makeSuite(TestRequest)
+return unittest.defaultTestLoader.loadTestsFromTestCase(TestRequest)
class TestRequest(OscTestCase):
def _get_fixtures_dir(self):
@@ -322,7 +322,7 @@ class TestRequest(OscTestCase):
def test_read_request1(self):
"""read in a request"""
-xml = open(os.path.join(self._get_fixtures_dir(), 'test_read_request1.xml'), 'r').read().strip()
+xml = open(os.path.join(self._get_fixtures_dir(), 'test_read_request1.xml')).read().strip()
r = osc.core.Request()
r.read(ET.fromstring(xml))
self.assertEqual(r.reqid, '42')
@@ -353,7 +353,7 @@ class TestRequest(OscTestCase):
def test_read_request2(self):
"""read in a request (with reviews)"""
-xml = open(os.path.join(self._get_fixtures_dir(), 'test_read_request2.xml'), 'r').read().strip()
+xml = open(os.path.join(self._get_fixtures_dir(), 'test_read_request2.xml')).read().strip()
r = osc.core.Request()
r.read(ET.fromstring(xml))
self.assertEqual(r.reqid, '123')
@@ -427,7 +427,7 @@ class TestRequest(OscTestCase):
def test_request_list_view1(self):
"""test the list_view method"""
-xml = open(os.path.join(self._get_fixtures_dir(), 'test_request_list_view1.xml'), 'r').read().strip()
+xml = open(os.path.join(self._get_fixtures_dir(), 'test_request_list_view1.xml')).read().strip()
exp = """\
62 State:new By:Admin When:2010-12-29T14:57:25
set_bugowner: buguser foo
@@ -444,7 +444,7 @@ class TestRequest(OscTestCase):
def test_request_list_view2(self):
"""test the list_view method (with history elements and description)"""
-xml = open(os.path.join(self._get_fixtures_dir(), 'test_request_list_view2.xml'), 'r').read().strip()
+xml = open(os.path.join(self._get_fixtures_dir(), 'test_request_list_view2.xml')).read().strip()
r = osc.core.Request()
r.read(ET.fromstring(xml))
exp = """\
@@ -458,7 +458,7 @@
def test_request_str1(self):
"""test the __str__ method"""
-xml = open(os.path.join(self._get_fixtures_dir(), 'test_request_str1.xml'), 'r').read().strip()
+xml = open(os.path.join(self._get_fixtures_dir(), 'test_request_str1.xml')).read().strip()
r = osc.core.Request()
r = osc.core.Request()
r.read(ET.fromstring(xml))
@@ -555,7 +555,7 @@ Comment: <no comment>"""
def test_get_actions(self):
"""test get_actions method"""
-xml = open(os.path.join(self._get_fixtures_dir(), 'test_request_list_view1.xml'), 'r').read().strip()
+xml = open(os.path.join(self._get_fixtures_dir(), 'test_request_list_view1.xml')).read().strip()
r = osc.core.Request()
r.read(ET.fromstring(xml))
sr_actions = r.get_actions('submit')


@@ -8,7 +8,7 @@ from .common import GET, OscTestCase
def suite():
import unittest
-return unittest.makeSuite(TestResults)
+return unittest.defaultTestLoader.loadTestsFromTestCase(TestResults)
class TestResults(OscTestCase):
def setUp(self):
@@ -29,7 +29,7 @@ class TestResults(OscTestCase):
return sys.stdout.getvalue()
def _get_fixture(self, filename):
-return open(os.path.join(self._get_fixtures_dir(), filename), 'r').read()
+return open(os.path.join(self._get_fixtures_dir(), filename)).read()
@GET('http://localhost/build/testproject/_result', file='result.xml')
def testPrjresults(self):


@@ -10,7 +10,7 @@ FIXTURES_DIR = os.path.join(os.path.dirname(__file__), 'revertfile_fixtures')
def suite():
import unittest
-return unittest.makeSuite(TestRevertFiles)
+return unittest.defaultTestLoader.loadTestsFromTestCase(TestRevertFiles)
class TestRevertFiles(OscTestCase):
def _get_fixtures_dir(self):
@@ -93,7 +93,7 @@ class TestRevertFiles(OscTestCase):
storefile = os.path.join('.osc', fname)
self.assertTrue(os.path.exists(fname))
self.assertTrue(os.path.exists(storefile))
-self.assertEqual(open(fname, 'r').read(), open(storefile, 'r').read())
+self.assertEqual(open(fname).read(), open(storefile).read())
if __name__ == '__main__':
import unittest


@@ -10,7 +10,7 @@ FIXTURES_DIR = os.path.join(os.path.dirname(__file__), 'setlinkrev_fixtures')
def suite():
import unittest
-return unittest.makeSuite(TestSetLinkRev)
+return unittest.defaultTestLoader.loadTestsFromTestCase(TestSetLinkRev)
class TestSetLinkRev(OscTestCase):
def setUp(self):


@@ -11,7 +11,7 @@ FIXTURES_DIR = os.path.join(os.path.dirname(__file__), 'update_fixtures')
def suite():
import unittest
-return unittest.makeSuite(TestUpdate)
+return unittest.defaultTestLoader.loadTestsFromTestCase(TestUpdate)
class TestUpdate(OscTestCase):
def _get_fixtures_dir(self):
@@ -115,7 +115,7 @@ class TestUpdate(OscTestCase):
self.assertEqual(sys.stdout.getvalue(), exp)
self._check_deletelist('foo\n')
self._check_conflictlist('merge\n')
-self.assertEqual(open('foo', 'r').read(), open(os.path.join('.osc', 'foo'), 'r').read())
+self.assertEqual(open('foo').read(), open(os.path.join('.osc', 'foo')).read())
self._check_digests('testUpdateLocalDeletions_files')
@GET('http://localhost/source/osctest/restore?rev=latest', file='testUpdateRestore_files')