meson, ci: Support tests that can fail under certain conditions

We have tests that are failing in some environments, but it's
difficult to handle them because:
 - for some environments we just allow all the tests to fail: DANGEROUS
 - when we don't allow failures, we have flaky tests: a CI pain

So, to avoid this and ensure that:
 - New failing tests are tracked in all platforms
 - GitLab integration for test reports keeps working
 - coverage is reported also for failing tests

Add support for `can_fail` keyword on tests that would mark the test as
part of the `failing` test suite.
We do not add the suite directly when defining the tests, because using the
`can_fail` keyword is definitely simpler and allows the failure conditions
to be expressed more clearly (see the next commits).

Now, add a default test setup that does not run the failing and flaky tests
by default (not to bother distributors with testing well-known issues) and
eventually run all the tests in CI:
 - Non-flaky tests must not fail on any platform
 - Failing and flaky tests are allowed to fail

In both cases we save the test reports so that gitlab integration is
preserved.
This commit is contained in:
Marco Trevisan (Treviño) 2022-10-19 20:08:15 +02:00
parent 9635fd4e40
commit 62dca6c1cf
10 changed files with 111 additions and 31 deletions

View File

@ -99,7 +99,9 @@ fedora-x86_64:
- lcov --config-file .lcovrc --directory _build --capture --output-file "_coverage/${CI_JOB_NAME}.lcov"
artifacts:
reports:
junit: "_build/meson-logs/testlog.junit.xml"
junit:
- _build/meson-logs/testlog.junit.xml
- _build/meson-logs/testlog-*.junit.xml
name: "glib-${CI_JOB_NAME}-${CI_COMMIT_REF_NAME}"
when: always
expire_in: 1 week
@ -140,7 +142,9 @@ debian-stable-x86_64:
- .gitlab-ci/run-tests.sh
artifacts:
reports:
junit: "_build/meson-logs/testlog.junit.xml"
junit:
- _build/meson-logs/testlog.junit.xml
- _build/meson-logs/testlog-*.junit.xml
name: "glib-${CI_JOB_NAME}-${CI_COMMIT_REF_NAME}"
when: always
expire_in: 1 week
@ -206,7 +210,9 @@ G_DISABLE_ASSERT:
- bash -x ./.gitlab-ci/run-tests.sh
artifacts:
reports:
junit: "_build/meson-logs/testlog.junit.xml"
junit:
- _build/meson-logs/testlog.junit.xml
- _build/meson-logs/testlog-*.junit.xml
name: "glib-${CI_JOB_NAME}-${CI_COMMIT_REF_NAME}"
when: always
expire_in: 1 week
@ -281,7 +287,9 @@ cross-mingw64:
- ninja -C _build
artifacts:
reports:
junit: "_build/meson-logs/testlog.junit.xml"
junit:
- _build/meson-logs/testlog.junit.xml
- _build/meson-logs/testlog-*.junit.xml
name: "glib-${env:CI_JOB_NAME}-${env:CI_COMMIT_REF_NAME}"
when: always
expire_in: 1 week
@ -308,7 +316,9 @@ msys2-mingw32:
- C:\msys64\usr\bin\bash -lc "bash -x ./.gitlab-ci/test-msys2.sh"
artifacts:
reports:
junit: "_build/meson-logs/testlog.junit.xml"
junit:
- _build/meson-logs/testlog.junit.xml
- _build/meson-logs/testlog-*.junit.xml
name: "glib-${env:CI_JOB_NAME}-${env:CI_COMMIT_REF_NAME}"
when: always
expire_in: 1 week
@ -331,7 +341,9 @@ vs2017-x64:
--python.purelibdir=C:\Python37\site-packages
artifacts:
reports:
junit: "_build/meson-logs/testlog.junit.xml"
junit:
- _build/meson-logs/testlog.junit.xml
- _build/meson-logs/testlog-*.junit.xml
name: "glib-${env:CI_JOB_NAME}-${env:CI_COMMIT_REF_NAME}"
when: always
expire_in: 1 week
@ -359,7 +371,9 @@ vs2017-x64-static:
--python.purelibdir=C:\Python37\site-packages
artifacts:
reports:
junit: "_build/meson-logs/testlog.junit.xml"
junit:
- _build/meson-logs/testlog.junit.xml
- _build/meson-logs/testlog-*.junit.xml
name: "glib-${env:CI_JOB_NAME}-${env:CI_COMMIT_REF_NAME}"
when: always
expire_in: 1 week
@ -401,7 +415,9 @@ freebsd-12-x86_64:
- bash -x ./.gitlab-ci/run-tests.sh
artifacts:
reports:
junit: "_build/meson-logs/testlog.junit.xml"
junit:
- _build/meson-logs/testlog.junit.xml
- _build/meson-logs/testlog-*.junit.xml
name: "glib-${CI_JOB_NAME}-${CI_COMMIT_REF_NAME}"
when: always
expire_in: 1 week
@ -428,7 +444,9 @@ freebsd-13-x86_64:
- bash -x ./.gitlab-ci/run-tests.sh
artifacts:
reports:
junit: "_build/meson-logs/testlog.junit.xml"
junit:
- _build/meson-logs/testlog.junit.xml
- _build/meson-logs/testlog-*.junit.xml
name: "glib-${CI_JOB_NAME}-${CI_COMMIT_REF_NAME}"
when: always
expire_in: 1 week
@ -468,7 +486,9 @@ macos:
- .gitlab-ci/run-tests.sh
artifacts:
reports:
junit: "_build/meson-logs/testlog.junit.xml"
junit:
- _build/meson-logs/testlog.junit.xml
- _build/meson-logs/testlog-*.junit.xml
name: "glib-${CI_JOB_NAME}-${CI_COMMIT_REF_NAME}"
when: always
expire_in: 1 week

View File

@ -7,5 +7,10 @@ set -e
meson test \
-C _build \
--timeout-multiplier "${MESON_TEST_TIMEOUT_MULTIPLIER}" \
--no-suite flaky \
"$@"
# Run only the flaky tests, so we can log the failures but without hard failing
meson test \
-C _build \
--timeout-multiplier "${MESON_TEST_TIMEOUT_MULTIPLIER}" \
"$@" --setup=unstable_tests --suite=failing --suite=flaky || true

View File

@ -18,7 +18,8 @@ python .gitlab-ci/check-missing-install-tag.py _build || goto :error
ninja -C _build || goto :error
:: FIXME: dont ignore test errors
meson test -C _build --timeout-multiplier %MESON_TEST_TIMEOUT_MULTIPLIER% --no-suite flaky
meson test -C _build --timeout-multiplier %MESON_TEST_TIMEOUT_MULTIPLIER%
meson test -C _build --timeout-multiplier %MESON_TEST_TIMEOUT_MULTIPLIER% --setup=unstable_tests --suite=failing --suite=flaky
:: FIXME: can we get code coverage support?

View File

@ -51,7 +51,9 @@ lcov \
--output-file "${DIR}/_coverage/${CI_JOB_NAME}-baseline.lcov"
# FIXME: fix the test suite
meson test --timeout-multiplier "${MESON_TEST_TIMEOUT_MULTIPLIER}" --no-suite flaky || true
meson test --timeout-multiplier "${MESON_TEST_TIMEOUT_MULTIPLIER}" || true
meson test --timeout-multiplier "${MESON_TEST_TIMEOUT_MULTIPLIER}" \
--setup=unstable_tests --suite=failing --suite=flaky || true
lcov \
--quiet \

View File

@ -149,9 +149,9 @@ test_extra_programs = {
'gsubprocess-testprog' : {},
}
python_tests = [
'codegen.py',
]
python_tests = {
'codegen.py' : {},
}
test_env = environment(common_test_env)
test_env.set('G_TEST_SRCDIR', meson.current_source_dir())
@ -907,6 +907,10 @@ foreach test_name, extra_args : gio_tests
local_test_env.append(var, value)
endforeach
if extra_args.get('can_fail', false)
suite += 'failing'
endif
test(test_name, exe,
env : local_test_env,
timeout : timeout,
@ -930,13 +934,19 @@ foreach program_name, extra_args : test_extra_programs
)
endforeach
foreach test_name : python_tests
foreach test_name, extra_args : python_tests
suite = ['gio', 'no-valgrind']
if extra_args.get('can_fail', false)
suite += 'failing'
endif
test(
test_name,
python,
args: ['-B', files(test_name)],
env: test_env,
suite: ['gio', 'no-valgrind'],
suite: suite,
)
if installed_tests_enabled

View File

@ -297,6 +297,11 @@ foreach test_name, extra_args : glib_tests
suite = ['glib'] + extra_args.get('suite', [])
timeout = suite.contains('slow') ? test_timeout_slow : test_timeout
if extra_args.get('can_fail', false)
suite += 'failing'
endif
test(test_name, exe,
env : test_env,
timeout : timeout,
@ -313,9 +318,9 @@ if installed_tests_enabled
)
endif
python_tests = [
'assert-msg-test.py',
]
python_tests = {
'assert-msg-test.py' : {},
}
executable('assert-msg-test', ['assert-msg-test.c'],
c_args : test_cargs,
@ -326,13 +331,19 @@ executable('assert-msg-test', ['assert-msg-test.c'],
win_subsystem : extra_args.get('win_subsystem', 'console'),
)
foreach test_name : python_tests
foreach test_name, extra_args : python_tests
suite = ['glib', 'no-valgrind']
if extra_args.get('can_fail', false)
suite += 'failing'
endif
test(
test_name,
python,
args: ['-B', files(test_name)],
env: test_env,
suite: ['glib', 'no-valgrind'],
suite: suite,
)
if installed_tests_enabled

View File

@ -96,5 +96,10 @@ foreach test_name, extra_args : gmodule_tests
suite = ['gmodule'] + extra_args.get('suite', [])
timeout = suite.contains('slow') ? test_timeout_slow : test_timeout
if extra_args.get('can_fail', false)
suite += 'failing'
endif
test(test_name, exe, env : test_env, timeout : timeout, suite : suite)
endforeach

View File

@ -123,11 +123,11 @@ if cc.get_id() != 'msvc'
gobject_tests += {'autoptr' : {}}
endif
python_tests = [
'genmarshal.py',
'gobject-query.py',
'mkenums.py',
]
python_tests = {
'genmarshal.py' : {},
'gobject-query.py' : {},
'mkenums.py' : {},
}
test_env = environment(common_test_env)
test_env.set('G_TEST_SRCDIR', meson.current_source_dir())
@ -167,6 +167,10 @@ foreach test_name, extra_args : gobject_tests
suite = ['gobject'] + extra_args.get('suite', [])
timeout = suite.contains('slow') ? test_timeout_slow : test_timeout
if extra_args.get('can_fail', false)
suite += 'failing'
endif
# FIXME: https://gitlab.gnome.org/GNOME/glib/issues/1316
# aka https://bugs.debian.org/880883
if test_name == 'closure-refcount' and ['arm', 'aarch64'].contains(host_machine.cpu_family())
@ -176,13 +180,19 @@ foreach test_name, extra_args : gobject_tests
test(test_name, exe, env : test_env, timeout : timeout, suite : suite)
endforeach
foreach test_name : python_tests
foreach test_name, extra_args : python_tests
suite = ['gobject', 'no-valgrind']
if extra_args.get('can_fail', false)
suite += 'failing'
endif
test(
test_name,
python,
args: ['-B', files(test_name)],
env: test_env,
suite: ['gobject', 'no-valgrind'],
suite: suite,
)
if installed_tests_enabled

View File

@ -40,6 +40,10 @@ foreach test_name, extra_args : gobject_tests
timeout = suite.contains('slow') ? test_timeout_slow : test_timeout
args = extra_args.get('args', [])
if extra_args.get('can_fail', false)
suite += 'failing'
endif
test(test_name, exe,
env : test_env,
timeout : timeout,

View File

@ -129,13 +129,24 @@ installed_tests_template_tap = files('tests/template-tap.test.in')
# Dont build the tests unless we can run them (either natively, in an exe wrapper, or by installing them for later use)
build_tests = get_option('tests') and (meson.can_run_host_binaries() or installed_tests_enabled)
add_test_setup('default',
is_default: true,
exclude_suites: ['flaky', 'failing'],
)
add_test_setup('unstable_tests',
# Empty test setup, used for having different results set for flaky tests
# Sadly we can't use (https://github.com/mesonbuild/meson/issues/10934):
#suites: ['flaky', 'unstable']
)
# Allow the tests to be easily run under valgrind using --setup=valgrind
valgrind = find_program('valgrind', required: false)
if valgrind.found()
suppression_file = files('tools' / 'glib.supp')
add_test_setup('valgrind',
exclude_suites: [ 'no-valgrind' ],
exclude_suites: [ 'no-valgrind', 'flaky' ],
exe_wrapper: [
valgrind,
'--tool=memcheck',
@ -2310,6 +2321,7 @@ common_test_env = [
'G_ENABLE_DIAGNOSTIC=1',
'MALLOC_CHECK_=2',
]
test_timeout = 60
test_timeout_slow = 180