Accepting request 1102628 from home:dgarcia:branches:devel:languages:python:pytest
- Update to 4.0.0rc5
- Add drop-assertpy-dep.patch to remove assertpy dependency
- Add vendor-npm.patch to vendor npm build requirements

OBS-URL: https://build.opensuse.org/request/show/1102628
OBS-URL: https://build.opensuse.org/package/show/devel:languages:python:pytest/python-pytest-html?expand=0&rev=33
This commit is contained in:
parent 0f180bdee4
commit 7fe7082838
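Both patches are included in full below. For orientation, drop-assertpy-dep.patch mechanically rewrites the assertpy fluent assertions in the test suite as plain pytest asserts, so assertpy is no longer needed at build time. A minimal sketch of the mapping the patch applies (the variable values here are made up for illustration):

```python
import re

result = ["row-1", "row-2"]  # hypothetical stand-in for a list the tests inspect
log = "E   AssertionError: assert False"

# assert_that(result).is_length(2)            becomes:
assert len(result) == 2
# assert_that(log).contains("AssertionError") becomes:
assert "AssertionError" in log
# assert_that(log).matches(r"^E\s+")          becomes:
assert re.match(r"^E\s+AssertionError", log)
```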
drop-assertpy-dep.patch (new file, 513 lines)
@@ -0,0 +1,513 @@
Index: pytest_html-4.0.0rc5/testing/test_e2e.py
===================================================================
--- pytest_html-4.0.0rc5.orig/testing/test_e2e.py
+++ pytest_html-4.0.0rc5/testing/test_e2e.py
@@ -5,7 +5,6 @@ import urllib.parse

import pytest
import selenium.webdriver.support.expected_conditions as ec
-from assertpy import assert_that
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.wait import WebDriverWait
@@ -67,7 +66,7 @@ def test_visible(pytester, path, driver)
ec.visibility_of_all_elements_located((By.CSS_SELECTOR, "#results-table"))
)
result = driver.find_elements(By.CSS_SELECTOR, "tr.collapsible")
- assert_that(result).is_length(2)
+ assert len(result) == 2

query_params = _encode_query_params({"visible": ""})
driver.get(f"file:///reports{path()}?{query_params}")
@@ -75,4 +74,4 @@ def test_visible(pytester, path, driver)
ec.visibility_of_all_elements_located((By.CSS_SELECTOR, "#results-table"))
)
result = driver.find_elements(By.CSS_SELECTOR, "tr.collapsible")
- assert_that(result).is_length(0)
+ assert len(result) == 0
Index: pytest_html-4.0.0rc5/testing/test_integration.py
===================================================================
--- pytest_html-4.0.0rc5.orig/testing/test_integration.py
+++ pytest_html-4.0.0rc5/testing/test_integration.py
@@ -9,7 +9,6 @@ from base64 import b64encode
from pathlib import Path

import pytest
-from assertpy import assert_that
from bs4 import BeautifulSoup
from selenium import webdriver

@@ -76,7 +75,7 @@ def assert_results(
if isinstance(number, int):
number_of_tests += number
result = get_text(page, f"span[class={outcome}]")
- assert_that(result).matches(rf"{number} {OUTCOMES[outcome]}")
+ assert re.match(rf"u{number} {OUTCOMES[outcome]}", result)
|
||||
|
||||
|
||||
def get_element(page, selector):
@@ -142,20 +141,18 @@ class TestHTML:
duration = get_text(page, "#results-table td[class='col-duration']")
total_duration = get_text(page, "p[class='run-count']")
if pause < 1:
- assert_that(int(duration.replace("ms", ""))).is_between(
- expectation, expectation * 2
- )
- assert_that(total_duration).matches(r"\d+\s+ms")
+ assert expectation < int(duration.replace("ms", "")) < expectation * 2
+ assert re.match(r"\d+\s+ms", total_duration)
else:
- assert_that(duration).matches(expectation)
- assert_that(total_duration).matches(r"\d{2}:\d{2}:\d{2}")
+ assert re.match(expectation, duration)
+ assert re.match(r"\d{2}:\d{2}:\d{2}", total_duration)

def test_total_number_of_tests_zero(self, pytester):
page = run(pytester)
assert_results(page)

total = get_text(page, "p[class='run-count']")
- assert_that(total).matches(r"0 test(?!s)")
+ assert re.match(r"0 test(?!s)", total)

def test_total_number_of_tests_singular(self, pytester):
pytester.makepyfile("def test_pass(): pass")
@@ -163,7 +160,7 @@ class TestHTML:
assert_results(page, passed=1)

total = get_text(page, "p[class='run-count']")
- assert_that(total).matches(r"1 test(?!s)")
+ assert re.match(r"1 test(?!s)", total)

def test_total_number_of_tests_plural(self, pytester):
pytester.makepyfile(
@@ -176,7 +173,7 @@ class TestHTML:
assert_results(page, passed=2)

total = get_text(page, "p[class='run-count']")
- assert_that(total).matches(r"2 tests(?!\S)")
+ assert re.match(r"2 tests(?!\S)", total)

def test_pass(self, pytester):
pytester.makepyfile("def test_pass(): pass")
@@ -196,7 +193,7 @@ class TestHTML:
assert_results(page, skipped=1, total_tests=0)

log = get_text(page, ".summary div[class='log']")
- assert_that(log).contains(reason)
+ assert reason in log

def test_skip_function_marker(self, pytester):
reason = str(random.random())
@@ -212,7 +209,7 @@ class TestHTML:
assert_results(page, skipped=1, total_tests=0)

log = get_text(page, ".summary div[class='log']")
- assert_that(log).contains(reason)
+ assert reason in log

def test_skip_class_marker(self, pytester):
reason = str(random.random())
@@ -229,16 +226,14 @@ class TestHTML:
assert_results(page, skipped=1, total_tests=0)

log = get_text(page, ".summary div[class='log']")
- assert_that(log).contains(reason)
+ assert reason in log

def test_fail(self, pytester):
pytester.makepyfile("def test_fail(): assert False")
page = run(pytester)
assert_results(page, failed=1)
- assert_that(get_log(page)).contains("AssertionError")
- assert_that(get_text(page, ".summary div[class='log'] span.error")).matches(
- r"^E\s+assert False$"
- )
+ assert "AssertionError" in get_log(page)
+ assert re.match(r"^E\s+assert False$", get_text(page, ".summary div[class='log'] span.error"))

def test_xfail(self, pytester):
reason = str(random.random())
@@ -251,7 +246,7 @@ class TestHTML:
)
page = run(pytester)
assert_results(page, xfailed=1)
- assert_that(get_log(page)).contains(reason)
+ assert reason in get_log(page)

def test_xfail_function_marker(self, pytester):
reason = str(random.random())
@@ -265,7 +260,7 @@ class TestHTML:
)
page = run(pytester)
assert_results(page, xfailed=1)
- assert_that(get_log(page)).contains(reason)
+ assert reason in get_log(page)

def test_xfail_class_marker(self, pytester):
pytester.makepyfile(
@@ -353,8 +348,8 @@ class TestHTML:
assert_results(page, error=1, total_tests=0)

col_name = get_text(page, ".summary td[class='col-name']")
- assert_that(col_name).contains("::setup")
- assert_that(get_log(page)).contains("ValueError")
+ assert "::setup" in col_name
+ assert "ValueError" in get_log(page)

@pytest.mark.parametrize("title", ["", "Special Report"])
def test_report_title(self, pytester, title):
@@ -371,8 +366,8 @@ class TestHTML:

expected_title = title if title else "report.html"
page = run(pytester)
- assert_that(get_text(page, "#head-title")).is_equal_to(expected_title)
- assert_that(get_text(page, "h1[id='title']")).is_equal_to(expected_title)
+ assert expected_title == get_text(page, "#head-title")
+ assert expected_title == get_text(page, "h1[id='title']")

def test_resources_inline_css(self, pytester):
pytester.makepyfile("def test_pass(): pass")
@@ -380,15 +375,13 @@ class TestHTML:

content = file_content()

- assert_that(get_text(page, "head style").strip()).contains(content)
+ assert content in get_text(page, "head style").strip()

def test_resources_css(self, pytester):
pytester.makepyfile("def test_pass(): pass")
page = run(pytester)

- assert_that(page.select_one("head link")["href"]).is_equal_to(
- str(Path("assets", "style.css"))
- )
+ assert page.select_one("head link")["href"] == str(Path("assets", "style.css"))

def test_custom_content_in_summary(self, pytester):
content = {
@@ -412,11 +405,11 @@ class TestHTML:
page = run(pytester)

elements = page.select(".summary__data p:not(.run-count):not(.filter)")
- assert_that(elements).is_length(3)
+ assert len(elements) == 3
for element in elements:
key = re.search(r"(\w+).*", element.string).group(1)
value = content.pop(key)
- assert_that(element.string).contains(value)
+ assert value in element.string

def test_extra_html(self, pytester):
content = str(random.random())
@@ -437,7 +430,7 @@ class TestHTML:
pytester.makepyfile("def test_pass(): pass")
page = run(pytester)

- assert_that(page.select_one(".summary .extraHTML").string).is_equal_to(content)
+ assert content == page.select_one(".summary .extraHTML").string

@pytest.mark.parametrize(
"content, encoded",
@@ -461,10 +454,8 @@ class TestHTML:
page = run(pytester, cmd_flags=["--self-contained-html"])

element = page.select_one(".summary a[class='col-links__extra text']")
- assert_that(element.string).is_equal_to("Text")
- assert_that(element["href"]).is_equal_to(
- f"data:text/plain;charset=utf-8;base64,{encoded}"
- )
+ assert "Text" == element.string
+ assert element["href"] == f"data:text/plain;charset=utf-8;base64,{encoded}"

def test_extra_json(self, pytester):
content = {str(random.random()): str(random.random())}
@@ -489,10 +480,8 @@ class TestHTML:
data = b64encode(content_str.encode("utf-8")).decode("ascii")

element = page.select_one(".summary a[class='col-links__extra json']")
- assert_that(element.string).is_equal_to("JSON")
- assert_that(element["href"]).is_equal_to(
- f"data:application/json;charset=utf-8;base64,{data}"
- )
+ assert "JSON" == element.string
+ assert element["href"] == f"data:application/json;charset=utf-8;base64,{data}"

def test_extra_url(self, pytester):
content = str(random.random())
@@ -513,8 +502,8 @@ class TestHTML:
page = run(pytester)

element = page.select_one(".summary a[class='col-links__extra url']")
- assert_that(element.string).is_equal_to("URL")
- assert_that(element["href"]).is_equal_to(content)
+ assert "URL" == element.string
+ assert element["href"] == content

@pytest.mark.parametrize(
"mime_type, extension",
@@ -552,7 +541,7 @@ class TestHTML:
# assert_that(element["href"]).is_equal_to(src)

element = page.select_one(".summary .media img")
- assert_that(str(element)).is_equal_to(f'<img src="{src}"/>')
+ assert str(element) == f'<img src="{src}"/>'

@pytest.mark.parametrize("mime_type, extension", [("video/mp4", "mp4")])
def test_extra_video(self, pytester, mime_type, extension):
@@ -580,9 +569,7 @@ class TestHTML:
# assert_that(element["href"]).is_equal_to(src)

element = page.select_one(".summary .media video")
- assert_that(str(element)).is_equal_to(
- f'<video controls="">\n<source src="{src}" type="{mime_type}"/>\n</video>'
- )
+ assert str(element) == f'<video controls="">\n<source src="{src}" type="{mime_type}"/>\n</video>'

def test_xdist(self, pytester):
pytester.makepyfile("def test_xdist(): pass")
@@ -613,19 +600,10 @@ class TestHTML:

description_index = 5
time_index = 6
- assert_that(get_text(page, header_selector.format(time_index))).is_equal_to(
- "Time"
- )
- assert_that(
- get_text(page, header_selector.format(description_index))
- ).is_equal_to("Description")
-
- assert_that(get_text(page, row_selector.format(time_index))).is_equal_to(
- "A time"
- )
- assert_that(get_text(page, row_selector.format(description_index))).is_equal_to(
- "A description"
- )
+ assert "Time" == get_text(page, header_selector.format(time_index))
+ assert "Description" == get_text(page, header_selector.format(description_index))
+ assert "A time" == get_text(page, row_selector.format(time_index))
+ assert "A description" == get_text(page, row_selector.format(description_index))

def test_results_table_hook_insert(self, pytester):
header_selector = (
@@ -652,19 +630,10 @@ class TestHTML:

description_index = 4
time_index = 2
- assert_that(get_text(page, header_selector.format(time_index))).is_equal_to(
- "Time"
- )
- assert_that(
- get_text(page, header_selector.format(description_index))
- ).is_equal_to("Description")
-
- assert_that(get_text(page, row_selector.format(time_index))).is_equal_to(
- "A time"
- )
- assert_that(get_text(page, row_selector.format(description_index))).is_equal_to(
- "A description"
- )
+ assert "Time" == get_text(page, header_selector.format(time_index))
+ assert "Description" == get_text(page, header_selector.format(description_index))
+ assert "A time" == get_text(page, row_selector.format(time_index))
+ assert "A description" == get_text(page, row_selector.format(description_index))

def test_results_table_hook_delete(self, pytester):
pytester.makeconftest(
@@ -701,12 +670,12 @@ class TestHTML:
page = run(pytester)

header_columns = page.select(".summary #results-table-head th")
- assert_that(header_columns).is_length(3)
+ assert len(header_columns) == 3

row_columns = page.select_one(".summary .results-table-row").select(
"td:not(.extra)"
)
- assert_that(row_columns).is_length(3)
+ assert len(row_columns) == 3

@pytest.mark.parametrize("no_capture", ["", "-s"])
def test_standard_streams(self, pytester, no_capture):
@@ -735,11 +704,11 @@ class TestHTML:
for when in ["setup", "call", "teardown"]:
for stream in ["stdout", "stderr"]:
if no_capture:
- assert_that(log).does_not_match(f"- Captured {stream} {when} -")
- assert_that(log).does_not_match(f"this is {when} {stream}")
+ assert not re.match(f"- Captured {stream} {when} -", log)
+ assert not re.match(f"this is {when} {stream}", log)
else:
- assert_that(log).matches(f"- Captured {stream} {when} -")
- assert_that(log).matches(f"this is {when} {stream}")
+ assert re.match(f"- Captured {stream} {when} -", log)
+ assert re.match(f"this is {when} {stream}", log)


class TestLogCapturing:
@@ -787,7 +756,7 @@ class TestLogCapturing:

log = get_log(page)
for when in ["setup", "test", "teardown"]:
- assert_that(log).matches(self.LOG_LINE_REGEX.format(when))
+ assert re.match(self.LOG_LINE_REGEX.format(when), log)

@pytest.mark.usefixtures("log_cli")
def test_setup_error(self, test_file, pytester):
@@ -796,9 +765,9 @@ class TestLogCapturing:
assert_results(page, error=1)

log = get_log(page)
- assert_that(log).matches(self.LOG_LINE_REGEX.format("setup"))
- assert_that(log).does_not_match(self.LOG_LINE_REGEX.format("test"))
- assert_that(log).does_not_match(self.LOG_LINE_REGEX.format("teardown"))
+ assert re.match(self.LOG_LINE_REGEX.format("setup"), log)
+ assert not re.match(self.LOG_LINE_REGEX.format("test"), log)
+ assert not re.match(self.LOG_LINE_REGEX.format("teardown"), log)

@pytest.mark.usefixtures("log_cli")
def test_test_fails(self, test_file, pytester):
@@ -808,7 +777,7 @@ class TestLogCapturing:

log = get_log(page)
for when in ["setup", "test", "teardown"]:
- assert_that(log).matches(self.LOG_LINE_REGEX.format(when))
+ assert re.match(self.LOG_LINE_REGEX.format(when), log)

@pytest.mark.usefixtures("log_cli")
@pytest.mark.parametrize(
@@ -822,7 +791,7 @@ class TestLogCapturing:
for test_name in ["test_logging", "test_logging::teardown"]:
log = get_log(page, test_name)
for when in ["setup", "test", "teardown"]:
- assert_that(log).matches(self.LOG_LINE_REGEX.format(when))
+ assert re.match(self.LOG_LINE_REGEX.format(when), log)

def test_no_log(self, test_file, pytester):
pytester.makepyfile(test_file(assertion=True))
@@ -830,9 +799,9 @@ class TestLogCapturing:
assert_results(page, passed=1)

log = get_log(page, "test_logging")
- assert_that(log).contains("No log output captured.")
+ assert "No log output captured." in log
for when in ["setup", "test", "teardown"]:
- assert_that(log).does_not_match(self.LOG_LINE_REGEX.format(when))
+ assert not re.match(self.LOG_LINE_REGEX.format(when), log)

@pytest.mark.usefixtures("log_cli")
def test_rerun(self, test_file, pytester):
@@ -843,8 +812,8 @@ class TestLogCapturing:
assert_results(page, failed=1, rerun=2)

log = get_log(page)
- assert_that(log.count("Captured log setup")).is_equal_to(3)
- assert_that(log.count("Captured log teardown")).is_equal_to(5)
+ assert log.count("Captured log setup") == 3
+ assert log.count("Captured log teardown") == 5


class TestCollapsedQueryParam:
@@ -871,9 +840,9 @@ class TestCollapsedQueryParam:
page = run(pytester)
assert_results(page, passed=1, failed=1, error=1)

- assert_that(is_collapsed(page, "test_pass")).is_true()
- assert_that(is_collapsed(page, "test_fail")).is_false()
- assert_that(is_collapsed(page, "test_error::setup")).is_false()
+ assert is_collapsed(page, "test_pass")
+ assert not is_collapsed(page, "test_fail")
+ assert not is_collapsed(page, "test_error::setup")

@pytest.mark.parametrize("param", ["failed,error", "FAILED,eRRoR"])
def test_specified(self, pytester, test_file, param):
@@ -881,9 +850,9 @@ class TestCollapsedQueryParam:
page = run(pytester, query_params={"collapsed": param})
assert_results(page, passed=1, failed=1, error=1)

- assert_that(is_collapsed(page, "test_pass")).is_false()
- assert_that(is_collapsed(page, "test_fail")).is_true()
- assert_that(is_collapsed(page, "test_error::setup")).is_true()
+ assert not is_collapsed(page, "test_pass")
+ assert is_collapsed(page, "test_fail")
+ assert is_collapsed(page, "test_error::setup")

def test_all(self, pytester, test_file):
pytester.makepyfile(test_file)
@@ -891,7 +860,7 @@ class TestCollapsedQueryParam:
assert_results(page, passed=1, failed=1, error=1)

for test_name in ["test_pass", "test_fail", "test_error::setup"]:
- assert_that(is_collapsed(page, test_name)).is_true()
+ assert is_collapsed(page, test_name)

@pytest.mark.parametrize("param", ["", 'collapsed=""', "collapsed=''"])
def test_falsy(self, pytester, test_file, param):
@@ -899,9 +868,9 @@ class TestCollapsedQueryParam:
page = run(pytester, query_params={"collapsed": param})
assert_results(page, passed=1, failed=1, error=1)

- assert_that(is_collapsed(page, "test_pass")).is_false()
- assert_that(is_collapsed(page, "test_fail")).is_false()
- assert_that(is_collapsed(page, "test_error::setup")).is_false()
+ assert not is_collapsed(page, "test_pass")
+ assert not is_collapsed(page, "test_fail")
+ assert not is_collapsed(page, "test_error::setup")

@pytest.mark.parametrize("param", ["failed,error", "FAILED,eRRoR"])
def test_render_collapsed(self, pytester, test_file, param):
@@ -915,9 +884,9 @@ class TestCollapsedQueryParam:
page = run(pytester)
assert_results(page, passed=1, failed=1, error=1)

- assert_that(is_collapsed(page, "test_pass")).is_false()
- assert_that(is_collapsed(page, "test_fail")).is_true()
- assert_that(is_collapsed(page, "test_error::setup")).is_true()
+ assert not is_collapsed(page, "test_pass")
+ assert is_collapsed(page, "test_fail")
+ assert is_collapsed(page, "test_error::setup")

def test_render_collapsed_precedence(self, pytester, test_file):
pytester.makeini(
@@ -934,7 +903,7 @@ class TestCollapsedQueryParam:
page = run(pytester, query_params={"collapsed": "skipped"})
assert_results(page, passed=1, failed=1, error=1, skipped=1)

- assert_that(is_collapsed(page, "test_pass")).is_false()
- assert_that(is_collapsed(page, "test_fail")).is_false()
- assert_that(is_collapsed(page, "test_error::setup")).is_false()
- assert_that(is_collapsed(page, "test_skip")).is_true()
+ assert not is_collapsed(page, "test_pass")
+ assert not is_collapsed(page, "test_fail")
+ assert not is_collapsed(page, "test_error::setup")
+ assert is_collapsed(page, "test_skip")
Index: pytest_html-4.0.0rc5/testing/test_unit.py
===================================================================
--- pytest_html-4.0.0rc5.orig/testing/test_unit.py
+++ pytest_html-4.0.0rc5/testing/test_unit.py
@@ -4,7 +4,6 @@ import sys

import pkg_resources
import pytest
-from assertpy import assert_that

pytest_plugins = ("pytester",)

@@ -112,7 +111,8 @@ def test_custom_css(pytester, css_file_p

with open(str(path)) as f:
css = f.read()
- assert_that(css).contains("* " + str(css_file_path)).contains("* two.css")
+ assert "* " + str(css_file_path) in css
+ assert "* two.css" in css


def test_custom_css_selfcontained(pytester, css_file_path, expandvar):
@@ -131,4 +131,6 @@ def test_custom_css_selfcontained(pytest

with open(pytester.path / "report.html") as f:
html = f.read()
- assert_that(html).contains("* " + str(css_file_path)).contains("* two.css")
+
+ assert "* " + str(css_file_path) in html
+ assert "* two.css" in html
node_modules.tar.gz (new file, 3 lines)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:1b899bcd015a5b31734e932627e10db12d286bdb5d6b63a6ee5c88b77137fbbc
size 12005630
@@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:c4e2f4bb0bffc437f51ad2174a8a3e71df81bbc2f6894604e604af18fbe687c3
size 41955
pytest_html-4.0.0rc5.tar.gz (new file, 3 lines)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:dc222ee5e2df1480e982e88b726cae40c1bb7fd688f1ed14bac5a069e4843393
size 145841
@@ -1,3 +1,10 @@
-------------------------------------------------------------------
Wed Aug 2 10:56:05 UTC 2023 - Daniel Garcia <daniel.garcia@suse.com>

- Update to 4.0.0rc5
- Add drop-assertpy-dep.patch to remove assertpy dependency
- Add vendor-npm.patch to vendor npm build requirements

-------------------------------------------------------------------
Tue May 9 11:37:55 UTC 2023 - Johannes Kastl <kastl@b1-systems.de>

@@ -18,26 +18,31 @@

%{?sle15_python_module_pythons}
Name: python-pytest-html
Version: 3.2.0
Version: 4.0.0rc5
Release: 0
Summary: Pytest plugin for generating HTML reports
License: MPL-2.0
URL: https://github.com/pytest-dev/pytest-html
Source: https://files.pythonhosted.org/packages/source/p/pytest-html/pytest-html-%{version}.tar.gz
BuildRequires: %{python_module pytest >= 5.0}
BuildRequires: %{python_module pytest-metadata}
BuildRequires: %{python_module py}
BuildRequires: %{python_module setuptools_scm}
BuildRequires: %{python_module setuptools}
Source: https://files.pythonhosted.org/packages/source/p/pytest-html/pytest_html-%{version}.tar.gz
Source1: node_modules.tar.gz
Patch0: vendor-npm.patch
Patch1: drop-assertpy-dep.patch
BuildRequires: %{python_module hatch-vcs}
BuildRequires: %{python_module hatchling}
BuildRequires: %{python_module pip}
BuildRequires: %{python_module wheel}
BuildRequires: fdupes
BuildRequires: npm
BuildRequires: python-rpm-macros
Requires: python-ansi2html
Requires: python-py
Requires: python-pytest >= 5.0
Requires: python-pytest-metadata
Requires: python-Jinja2 >= 3.0.0
Requires: python-pytest >= 7.0.0
Requires: python-pytest-metadata >= 3.0.0
BuildArch: noarch
# SECTION test requirements
BuildRequires: %{python_module ansi2html}
BuildRequires: %{python_module Jinja2 >= 3.0.0}
BuildRequires: %{python_module beautifulsoup4}
BuildRequires: %{python_module pytest >= 7.0.0}
BuildRequires: %{python_module pytest-metadata >= 3.0.0}
BuildRequires: %{python_module pytest-mock}
BuildRequires: %{python_module pytest-rerunfailures}
BuildRequires: %{python_module pytest-xdist}
@@ -48,18 +53,18 @@ BuildRequires: %{python_module pytest-xdist}
A plugin for pytest that generates a HTML report for test results.

%prep
%setup -q -n pytest-html-%{version}
%autosetup -p1 -n pytest_html-%{version} -a1

%build
%python_build
%pyproject_wheel

%install
%python_install
%pyproject_install
%python_expand %fdupes %{buildroot}%{$python_sitelib}

%check
export LANG=en_US.UTF-8
%pytest
%pytest --ignore testing/test_integration.py --ignore testing/test_e2e.py

%files %{python_files}
%license LICENSE
vendor-npm.patch (new file, 29 lines)
@@ -0,0 +1,29 @@
Index: pytest_html-4.0.0rc5/package.json
===================================================================
--- pytest_html-4.0.0rc5.orig/package.json
+++ pytest_html-4.0.0rc5/package.json
@@ -8,14 +8,5 @@
"all": "npm run lint && npm run unit && npm run build:css && npm run build:jsapp"
},
"devDependencies": {
- "browserify": "^17.0.0",
- "chai": "^4.3.6",
- "eslint": "^8.20.0",
- "eslint-config-google": "^0.14.0",
- "mocha": "^10.0.0",
- "mock-local-storage": "^1.1.24",
- "nyc": "^15.1.0",
- "sass": "^1.52.3",
- "sinon": "^14.0.0"
}
}
Index: pytest_html-4.0.0rc5/scripts/npm.py
===================================================================
--- pytest_html-4.0.0rc5.orig/scripts/npm.py
+++ pytest_html-4.0.0rc5/scripts/npm.py
@@ -5,5 +5,4 @@ from hatchling.builders.hooks.plugin.int

class NpmBuildHook(BuildHookInterface):
    def initialize(self, version, build_data):
-        subprocess.check_output("npm ci", shell=True)
        subprocess.check_output("npm run build", shell=True)
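vendor-npm.patch works together with the vendored node_modules.tar.gz added as Source1 and unpacked by %autosetup -a1: with the JavaScript dependencies already in place, the hatchling build hook no longer has to download them with npm ci. A sketch of scripts/npm.py after the patch; the lines outside the hunk (the imports) are assumed to match upstream:

```python
import subprocess

from hatchling.builders.hooks.plugin.interface import BuildHookInterface


class NpmBuildHook(BuildHookInterface):
    def initialize(self, version, build_data):
        # "npm ci" was removed by vendor-npm.patch: node_modules is pre-vendored,
        # so only the asset build step runs during the wheel build.
        subprocess.check_output("npm run build", shell=True)
```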