%pyproject_buildrequires: Add support for self-referential extras requirements

Miro Hrončok 2023-04-27 09:47:47 +02:00
parent a5e7a3cd07
commit bd7890110c
3 changed files with 199 additions and 9 deletions
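Note: a self-referential extras requirement is an extra that depends on the package itself with other extras enabled, e.g. dev = ["my_package[docs,tests]"] under [project.optional-dependencies]; before this change %pyproject_buildrequires could not translate such entries into BuildRequires. The snippet below is a standalone illustration (not part of this commit) of how such a requirement can be recognized with the packaging library; the package and extra names are made up:

    # Standalone illustration (not part of this commit) of the kind of
    # requirement the patch handles. Package and extra names are made up.
    from packaging.requirements import Requirement
    from packaging.utils import canonicalize_name

    # An extra that refers back to the package itself, e.g. from
    #   [project.optional-dependencies]
    #   dev = ["my_package[docs,tests]"]
    req = Requirement('my_package[docs,tests]')

    package_name = 'my-package'   # name of the package being built
    if canonicalize_name(package_name) == canonicalize_name(req.name):
        # Instead of a dependency on the package itself, only the
        # referenced extras need to be pulled in.
        print(sorted(req.extras))  # ['docs', 'tests']

The patch below applies this idea inside Requirements.add() and re-processes previously ignored requirements once new extras are selected.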

View File

@@ -147,9 +147,11 @@ export HOSTNAME="rpmbuild" # to speedup tox in network-less mock, see rhbz#1856
 %changelog
-* Tue Apr 18 2023 Miro Hrončok <mhroncok@redhat.com> - 1.8.0-1
+* Thu Apr 27 2023 Miro Hrončok <mhroncok@redhat.com> - 1.8.0-1
+- %%pyproject_buildrequires: Add support for self-referential extras requirements
+  Fixes: rhbz#2171343
 - Deprecate the provisional %%{pyproject_build_lib} macro
-- See https://lists.fedoraproject.org/archives/list/python-devel@lists.fedoraproject.org/thread/HMLOPAU3RZLXD4BOJHTIPKI3I4U6U7OE/
+  See https://lists.fedoraproject.org/archives/list/python-devel@lists.fedoraproject.org/thread/HMLOPAU3RZLXD4BOJHTIPKI3I4U6U7OE/

 * Fri Mar 31 2023 Miro Hrončok <mhroncok@redhat.com> - 1.7.0-1
 - %%pyproject_buildrequires: Redirect stdout to stderr via Shell

View File

@@ -77,6 +77,7 @@ class Requirements:
                 self.add_extras(*extra.split(','))

         self.missing_requirements = False
+        self.ignored_alien_requirements = []

         self.generate_extras = generate_extras
         self.python3_pkgversion = python3_pkgversion
@@ -96,7 +97,7 @@ class Requirements:
                 return True
         return False

-    def add(self, requirement_str, *, source=None):
+    def add(self, requirement_str, *, package_name=None, source=None):
         """Output a Python-style requirement string as RPM dep"""
         print_err(f'Handling {requirement_str} from {source}')
@@ -118,6 +119,21 @@
         if (requirement.marker is not None and
                 not self.evaluate_all_environments(requirement)):
             print_err(f'Ignoring alien requirement:', requirement_str)
+            self.ignored_alien_requirements.append(requirement_str)
+            return
+
+        # Handle self-referencing requirements
+        if package_name and canonicalize_name(package_name) == name:
+            # Self-referential extras need to be handled specially
+            if requirement.extras:
+                if not (requirement.extras <= self.extras):  # only handle it if needed
+                    # let all further requirements know we want those extras
+                    self.add_extras(*requirement.extras)
+                    # re-add all of the alien requirements ignored in the past
+                    # they might no longer be alien now
+                    self.readd_ignored_alien_requirements(package_name=package_name)
+            else:
+                print_err(f'Ignoring self-referential requirement without extras:', requirement_str)
             return

         # We need to always accept pre-releases as satisfying the requirement
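Why previously ignored requirements are re-added: metadata entries for optional dependencies carry markers such as ; extra == "tests", so they evaluate as alien until the corresponding extra is selected. Once a self-referential requirement like my_package[tests] turns that extra on, requirements ignored earlier may start to apply. A standalone sketch of that marker behaviour (not part of this patch, requirement strings made up), assuming the packaging library:

    # Standalone illustration (not part of this patch) of why previously
    # "alien" requirements are re-added once a self-referential extra is seen.
    # Assumes the 'packaging' library; the requirement string is made up.
    from packaging.requirements import Requirement

    req = Requirement('pytest>=5; extra == "tests"')

    # With no relevant extra selected, the marker does not match -> "alien".
    print(req.marker.evaluate({'extra': ''}))       # False

    # After my_package[tests] is processed and "tests" is added to the
    # selected extras, re-evaluating the same requirement now matches.
    print(req.marker.evaluate({'extra': 'tests'}))  # True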
@@ -176,6 +192,12 @@ class Requirements:
         for req_str in requirement_strs:
             self.add(req_str, **kwargs)

+    def readd_ignored_alien_requirements(self, **kwargs):
+        """add() previously ignored alien requirements again."""
+        requirements, self.ignored_alien_requirements = self.ignored_alien_requirements, []
+        kwargs.setdefault('source', 'Previously ignored alien requirements')
+        self.extend(requirements, **kwargs)
+

 def toml_load(opened_binary_file):
     try:
@@ -256,11 +278,25 @@ def generate_build_requirements(backend, requirements):
     requirements.check(source='get_requires_for_build_wheel')


-def requires_from_metadata_file(metadata_file):
-    message = email.parser.Parser().parse(metadata_file, headersonly=True)
+def parse_metadata_file(metadata_file):
+    return email.parser.Parser().parse(metadata_file, headersonly=True)
+
+
+def requires_from_parsed_metadata_file(message):
     return {k: message.get_all(k, ()) for k in ('Requires', 'Requires-Dist')}


+def package_name_from_parsed_metadata_file(message):
+    return message.get('name')
+
+
+def package_name_and_requires_from_metadata_file(metadata_file):
+    message = parse_metadata_file(metadata_file)
+    package_name = package_name_from_parsed_metadata_file(message)
+    requires = requires_from_parsed_metadata_file(message)
+    return package_name, requires
+
+
 def generate_run_requirements_hook(backend, requirements):
     hook_name = 'prepare_metadata_for_build_wheel'
     prepare_metadata = getattr(backend, hook_name, None)
@@ -273,8 +309,11 @@ def generate_run_requirements_hook(backend, requirements):
         )
     dir_basename = prepare_metadata('.')
     with open(dir_basename + '/METADATA') as metadata_file:
-        for key, requires in requires_from_metadata_file(metadata_file).items():
-            requirements.extend(requires, source=f'hook generated metadata: {key}')
+        name, requires = package_name_and_requires_from_metadata_file(metadata_file)
+        for key, req in requires.items():
+            requirements.extend(req,
+                                package_name=name,
+                                source=f'hook generated metadata: {key} ({name})')


 def find_built_wheel(wheeldir):
@@ -304,8 +343,11 @@ def generate_run_requirements_wheel(backend, requirements, wheeldir):
         for name in wheelfile.namelist():
             if name.count('/') == 1 and name.endswith('.dist-info/METADATA'):
                 with io.TextIOWrapper(wheelfile.open(name), encoding='utf-8') as metadata_file:
-                    for key, requires in requires_from_metadata_file(metadata_file).items():
-                        requirements.extend(requires, source=f'built wheel metadata: {key}')
+                    name, requires = package_name_and_requires_from_metadata_file(metadata_file)
+                    for key, req in requires.items():
+                        requirements.extend(req,
+                                            package_name=name,
+                                            source=f'built wheel metadata: {key} ({name})')
                 break
         else:
             raise RuntimeError('Could not find *.dist-info/METADATA in built wheel.')
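The helpers above now return both the distribution name and its requires, so the callers can pass package_name= down to Requirements.add(). A standalone sketch (not part of this patch) of what they extract from a METADATA file, using made-up metadata text:

    # Standalone sketch (not part of this patch): what the new helpers see in
    # a METADATA file. The metadata text below is made up for illustration.
    import email.parser
    import io

    METADATA = """\
    Metadata-Version: 2.1
    Name: my_package
    Version: 0.1
    Requires-Dist: foo
    Requires-Dist: pytest>=5; extra == "tests"
    """

    message = email.parser.Parser().parse(io.StringIO(METADATA), headersonly=True)
    name = message.get('name')                    # 'my_package'
    requires = {k: message.get_all(k, ())
                for k in ('Requires', 'Requires-Dist')}
    print(name, requires['Requires-Dist'])
    # my_package ['foo', 'pytest>=5; extra == "tests"']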

View File

@@ -835,3 +835,149 @@ Stdout from wrapped subprocess does not appear in output:
     python3dist(wheel)
     python3dist(wheel)
   result: 0
+
+pyproject.toml with runtime dependencies:
+  installed:
+    setuptools: 50
+    wheel: 1
+    toml: 1
+  pyproject.toml: |
+    [build-system]
+    requires = ["setuptools"]
+    build-backend = "setuptools.build_meta"
+    [project]
+    name = "my_package"
+    version = "0.1"
+    dependencies = [
+        "foo",
+        'importlib-metadata; python_version<"3.8"',
+    ]
+  expected: |
+    python3dist(setuptools)
+    python3dist(wheel)
+    python3dist(foo)
+  result: 0
+
+pyproject.toml with runtime dependencies and partially selected extras:
+  installed:
+    setuptools: 50
+    wheel: 1
+    toml: 1
+  extras:
+    - tests
+  pyproject.toml: |
+    [build-system]
+    requires = ["setuptools"]
+    build-backend = "setuptools.build_meta"
+    [project]
+    name = "my_package"
+    version = "0.1"
+    dependencies = [
+        "foo",
+        'importlib-metadata; python_version<"3.8"',
+    ]
+    [project.optional-dependencies]
+    tests = ["pytest>=5", "pytest-mock"]
+    docs = ["sphinx", "python-docs-theme"]
+  expected: |
+    python3dist(setuptools)
+    python3dist(wheel)
+    python3dist(foo)
+    python3dist(pytest) >= 5
+    python3dist(pytest-mock)
+  result: 0
+
+pyproject.toml with runtime dependencies and self-referencing extras (sooner):
+  installed:
+    setuptools: 50
+    wheel: 1
+    toml: 1
+  extras:
+    - dev # this is deliberately sooner in the alphabet than the referenced ones
+  pyproject.toml: |
+    [build-system]
+    requires = ["setuptools"]
+    build-backend = "setuptools.build_meta"
+    [project]
+    name = "my_package"
+    version = "0.1"
+    dependencies = [
+        "foo",
+        'importlib-metadata; python_version<"3.8"',
+    ]
+    [project.optional-dependencies]
+    tests = ["pytest>=5", "pytest-mock"]
+    docs = ["sphinx", "python-docs-theme"]
+    dev = ["my_package[docs,tests]"]
+  expected: |
+    python3dist(setuptools)
+    python3dist(wheel)
+    python3dist(foo)
+    python3dist(sphinx)
+    python3dist(python-docs-theme)
+    python3dist(pytest) >= 5
+    python3dist(pytest-mock)
+  result: 0
+
+pyproject.toml with runtime dependencies and self-referencing extras (later):
+  installed:
+    setuptools: 50
+    wheel: 1
+    toml: 1
+  extras:
+    - xdev # this is deliberately later in the alphabet than the referenced ones
+  pyproject.toml: |
+    [build-system]
+    requires = ["setuptools"]
+    build-backend = "setuptools.build_meta"
+    [project]
+    name = "my_package"
+    version = "0.1"
+    dependencies = [
+        "foo",
+        'importlib-metadata; python_version<"3.8"',
+    ]
+    [project.optional-dependencies]
+    tests = ["pytest>=5", "pytest-mock"]
+    docs = ["sphinx", "python-docs-theme"]
+    xdev = ["my_package[docs,tests]"]
+  expected: |
+    python3dist(setuptools)
+    python3dist(wheel)
+    python3dist(foo)
+    python3dist(sphinx)
+    python3dist(python-docs-theme)
+    python3dist(pytest) >= 5
+    python3dist(pytest-mock)
+  result: 0
+
+pyproject.toml with runtime dependencies and self-referencing extras (maze):
+  installed:
+    setuptools: 50
+    wheel: 1
+    toml: 1
+  extras:
+    - start
+  pyproject.toml: |
+    [build-system]
+    requires = ["setuptools"]
+    build-backend = "setuptools.build_meta"
+    [project]
+    name = "my_package"
+    version = "0.1"
+    [project.optional-dependencies]
+    start = ["my_package[left,right]", "startdep"]
+    left = ["my_package[right,forward]", "leftdep"]
+    right = ["my_package[left,forward]", "rightdep"]
+    forward = ["my_package[backward]", "forwarddep"]
+    backward = ["my_package[left,right]", "backwarddep"]
+    never = ["my_package[forward]", "neverdep"]
+  expected: |
+    python3dist(setuptools)
+    python3dist(wheel)
+    python3dist(backwarddep)
+    python3dist(forwarddep)
+    python3dist(leftdep)
+    python3dist(rightdep)
+    python3dist(startdep)
+  result: 0
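For the maze case, the expected output follows from the closure of self-references reachable from the selected start extra: left, right, forward and backward all become selected, while never is never reached, so neverdep is absent. A standalone sketch (not part of the test suite) of that closure computation, with a hand-written graph mirroring the test's TOML:

    # Standalone sketch (not part of the test suite): the set of extras the
    # "maze" case ends up selecting is the closure of self-references
    # reachable from "start". The graph below mirrors the test's TOML.
    graph = {
        'start':    ['left', 'right'],
        'left':     ['right', 'forward'],
        'right':    ['left', 'forward'],
        'forward':  ['backward'],
        'backward': ['left', 'right'],
        'never':    ['forward'],
    }

    selected, todo = set(), ['start']
    while todo:
        extra = todo.pop()
        if extra not in selected:
            selected.add(extra)
            todo.extend(graph[extra])

    print(sorted(selected))
    # ['backward', 'forward', 'left', 'right', 'start'] -> "never" is
    # unreachable, so neverdep does not appear among the BuildRequires.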