Use f-strings where possible

Makes the code slightly more concise and easier to read.
Dirk Mueller 2024-10-01 10:06:50 +02:00
parent ba61d2071f
commit 2918627094
2 changed files with 29 additions and 30 deletions
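As a quick illustration of the pattern applied throughout, a minimal before/after pair (the values 'base' and 'vim' are made-up placeholders, not taken from the diff):

    group, n = 'base', 'vim'  # hypothetical example values

    # before: string concatenation
    label = group + ':recommended:' + n

    # after: the equivalent f-string
    label = f"{group}:recommended:{n}"

    assert label == 'base:recommended:vim'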

@@ -154,7 +154,7 @@ class Group(object):
                 # only add recommends that exist as packages
                 rec = pool.select(dep.str(), solv.Selection.SELECTION_NAME)
                 if not rec.isempty():
-                    extra.append([dep.str(), group + ':recommended:' + n])
+                    extra.append([dep.str(), f"{group}:recommended:{n}"])

         jobs += sel.jobs(solv.Job.SOLVER_INSTALL)
@@ -183,10 +183,10 @@ class Group(object):
             for s in solver.get_recommended():
                 if s.name in locked:
                     continue
-                self.recommends.setdefault(s.name, group + ':' + n)
+                self.recommends.setdefault(s.name, f"{group}:{n}")
             if n in self.expand_suggested:
                 for s in solver.get_suggested():
-                    suggested[s.name] = group + ':suggested:' + n
+                    suggested[s.name] = f"{group}:suggested:{n}"
                     self.suggested.setdefault(s.name, suggested[s.name])

         trans = solver.transaction()
@@ -195,7 +195,7 @@ class Group(object):
                 return
             for s in trans.newsolvables():
-                solved[arch].setdefault(s.name, group + ':' + n)
+                solved[arch].setdefault(s.name, f"{group}:{n}")
                 if None:
                     reason, rule = solver.describe_decision(s)
                     print(self.name, s.name, reason, rule.info().problemstr())
@@ -204,7 +204,7 @@ class Group(object):
                     src = s.name
                 else:
                     src = s.lookup_str(solv.SOLVABLE_SOURCENAME)
-                self.srcpkgs[src] = group + ':' + s.name
+                self.srcpkgs[src] = f"{group}:{s.name}"

         start = time.time()
         for n, group in self.packages[arch]:
@@ -233,7 +233,7 @@ class Group(object):
             solver.solve(jobs)
             trans = solver.transaction()
             for s in trans.newsolvables():
-                solved[arch].setdefault(s.name, group + ':expansion')
+                solved[arch].setdefault(s.name, f"{group}:expansion")

         end = time.time()
         self.logger.info('%s - solving took %f', self.name, end - start)
@@ -274,15 +274,14 @@ class Group(object):
             for arch in self.pkglist.filtered_architectures:
                 mp.update(m.solved_packages[arch])
             if len(packages & mp):
-                overlap.comment += '\n overlapping between ' + self.name + ' and ' + m.name + '\n'
+                overlap.comment += f"\n overlapping between {self.name} and {m.name}\n"
                 for p in sorted(packages & mp):
                     for arch in list(m.solved_packages):
                         if m.solved_packages[arch].get(p, None):
-                            overlap.comment += ' # ' + m.name + '.' + arch + ': ' + m.solved_packages[arch][p] + '\n'
+                            overlap.comment += f" # {m.name}.{arch}: {m.solved_packages[arch][p]}\n"
                         if self.solved_packages[arch].get(p, None):
-                            overlap.comment += ' # ' + self.name + '.' + \
-                                arch + ': ' + self.solved_packages[arch][p] + '\n'
-                    overlap.comment += ' - ' + p + '\n'
+                            overlap.comment += f" # {self.name}.{arch}: {self.solved_packages[arch][p]}\n"
+                    overlap.comment += f" - {p}\n"
                     overlap._add_to_packages(p)

     def collect_devel_packages(self):
@@ -355,7 +354,7 @@ class Group(object):
         name = self.name
         if arch != '*':
-            name += '.' + arch
+            name += f".{arch}"
         root = ET.Element('group', {'name': name})
         if comment:

@@ -140,7 +140,7 @@ class PkgListGen(ToolBase.ToolBase):
         archs = ['*'] + self.all_architectures
         # a single file covering all builds via multibuild flavors
         with open(os.path.join(self.output_dir, 'default.productcompose'), 'a') as opfh:
-            opfh.write(PRODUCT_COMPOSE_SEPERATOR_LINE + "\n")
+            opfh.write(f"{PRODUCT_COMPOSE_SEPERATOR_LINE}\n")
             for name in self.groups:
                 group = self.groups[name]
                 if not group.solved:
@@ -158,11 +158,11 @@ class PkgListGen(ToolBase.ToolBase):
                         opfh.write(f" - {f}\n")
                 if arch == '*':
                     for included_arch in self.all_architectures:
-                        opfh.write(' - ' + f + '_' + included_arch + '\n')
+                        opfh.write(f" - {f}_{included_arch}\n")
                 else:
-                    opfh.write(' - ' + f + '_' + arch + '\n')
+                    opfh.write(f" - {f}_{arch}\n")
                 opfh.write(' architectures:\n')
-                opfh.write(' - ' + arch + '\n')
+                opfh.write(f" - {arch}\n")
                 opfh.write(' packages:\n')
                 opfh.write(group.tocompose(' ', arch, group.ignore_broken))
             # write main group including all sets
@@ -172,9 +172,9 @@ class PkgListGen(ToolBase.ToolBase):
             if not group.solved:
                 continue
             for f in group.flavors:
-                opfh.write(' - ' + f + '\n')
+                opfh.write(f" - {f}\n")
                 for included_arch in self.all_architectures:
-                    opfh.write(' - ' + f + '_' + included_arch + '\n')
+                    opfh.write(f" - {f}_{included_arch}\n")

     def write_all_groups(self):
         self._check_supplements()
@@ -250,8 +250,8 @@ class PkgListGen(ToolBase.ToolBase):
                 tocheck.add(s.name)

         for locale in self.locales:
-            id = pool.str2id(f'locale({locale})')
-            for s in pool.whatprovides(id):
+            pool_id = pool.str2id(f'locale({locale})')
+            for s in pool.whatprovides(pool_id):
                 tocheck_locales.add(s.name)

         all_grouped = set()
@@ -381,7 +381,7 @@ class PkgListGen(ToolBase.ToolBase):
                 fh.write(']')
             reason = self._find_reason(p, modules)
             if reason:
-                fh.write(' # ' + reason)
+                fh.write(f" # {reason}")
             fh.write(' \n')

     # give a hint if the package is related to a group
@@ -389,18 +389,18 @@ class PkgListGen(ToolBase.ToolBase):
         # go through the modules multiple times to find the "best"
         for g in modules:
             if package in g.recommends:
-                return 'recommended by ' + g.recommends[package]
+                return f"recommended by {g.recommends[package]}"
         for g in modules:
             if package in g.suggested:
-                return 'suggested by ' + g.suggested[package]
+                return f"suggested by {g.suggested[package]}"
         for g in modules:
             if package in g.develpkgs:
-                return 'devel package of ' + g.develpkgs[package]
+                return f"devel package of {g.develpkgs[package]}"
         return None

     def update_one_repo(self, project, repo, arch, solv_file, solv_file_hash):
         # Either hash changed or new, so remove any old hash files.
-        file_utils.unlink_list(None, glob.glob(solv_file + '::*'))
+        file_utils.unlink_list(None, glob.glob(f"{solv_file}::*"))

         d = os.path.join(CACHEDIR, project, repo, arch)
         if not os.path.exists(d):
@@ -686,7 +686,7 @@ class PkgListGen(ToolBase.ToolBase):
         host = urlparse(api.apiurl).hostname
         prefix_dir = 'pkglistgen'
         if custom_cache_tag:
-            prefix_dir += '-' + custom_cache_tag
+            prefix_dir += f"-{custom_cache_tag}"
         cache_dir = CacheManager.directory(prefix_dir, host, project)

         drop_list = []
@@ -701,7 +701,7 @@ class PkgListGen(ToolBase.ToolBase):
             return

         if git_url:
             git_url_base, *fragment = git_url.split('#')
-            if os.path.exists(cache_dir + "/.git"):
+            if os.path.exists(f"{cache_dir}/.git"):
                 # reset and update existing clone
                 logging.debug(subprocess.check_output(
                     ['git', 'reset', '--hard'], cwd=cache_dir, encoding='utf-8'))
@@ -718,7 +718,7 @@ class PkgListGen(ToolBase.ToolBase):
                 if fragment:
                     args += ['--branch', fragment[0]]
                 logging.debug(subprocess.check_output(args, encoding='utf-8'))
-            if os.path.exists(cache_dir + '/' + '000update-repos'):
+            if os.path.exists(f"{cache_dir}/000update-repos"):
                 logging.error('No support for 000update-repos in git projects atm')
                 return
         else:
@@ -891,7 +891,7 @@ class PkgListGen(ToolBase.ToolBase):
             if package.get_status(False, ' '):
                 todo_spec_files = glob.glob(os.path.join(release_dir, '*.spec'))
                 for spec_file in todo_spec_files:
-                    changes_file = os.path.splitext(spec_file)[0] + '.changes'
+                    changes_file = f"{os.path.splitext(spec_file)[0]}.changes"
                     with open(changes_file, 'w', encoding="utf-8") as f:
                         date = datetime.now(timezone.utc)
                         date = date.strftime("%a %b %d %H:%M:%S %Z %Y")
@@ -920,7 +920,7 @@ class PkgListGen(ToolBase.ToolBase):
         with open(summary_file, 'w') as f:
             for line in sorted(output):
-                f.write(line + '\n')
+                f.write(f"{line}\n")

         if git_url:
             logging.debug(subprocess.check_output(