tests/performance: add "factor" argument to performance test

By default, the test estimates a run factor for each test. This means,
if you run performance under `perf`, the results are not comparable,
as the run time depends on the estimated factor.

Add an option to set a fixed factor.

Of course, there is only one factor argument for all tests.  Quite
possibly, you would want to run each test individually with a factor
appropriate for the test. On the other hand, all tests should be tuned
so that the same factor gives a similar test duration. So this may not
be a concern, or the tests should be adjusted. In any case, the option
is most useful when running only one test explicitly.

You can get a suitable factor by running the test once with "--verbose".

Another use case is if you run the benchmark under valgrind. Valgrind
slows down the run so much that the estimated factor would be quite
off. As a result, the chosen code paths are different from the real run.
With a fixed factor, the timing measurements don't affect the executed
code.
This commit is contained in:
Thomas Haller 2024-03-05 09:53:03 +01:00 committed by Philip Withnall
parent e5e3c37d22
commit 29a69d5a1b

View File

@ -33,6 +33,7 @@
static gboolean verbose = FALSE;
static gboolean quiet = FALSE;
static int test_length = DEFAULT_TEST_TIME;
static double test_factor = 0;
static GOptionEntry cmd_entries[] = {
{"verbose", 'v', 0, G_OPTION_ARG_NONE, &verbose,
@ -41,6 +42,8 @@ static GOptionEntry cmd_entries[] = {
"Print extra information", NULL},
{"seconds", 's', 0, G_OPTION_ARG_INT, &test_length,
"Time to run each test in seconds", NULL},
{"factor", 'f', 0, G_OPTION_ARG_DOUBLE, &test_factor,
"Use a fixed factor for sample runs (also $GLIB_PERFORMANCE_FACTOR)", NULL},
G_OPTION_ENTRY_NULL
};
@ -101,24 +104,32 @@ run_test (PerformanceTest *test)
g_print ("Estimating round time\n");
}
/* Estimate time for one run by doing a few test rounds */
min_elapsed = 0;
for (i = 0; i < ESTIMATE_ROUND_TIME_N_RUNS; i++)
if (test_factor > 0)
{
test->init (test, data, 1.0);
g_timer_start (timer);
test->run (test, data);
g_timer_stop (timer);
test->finish (test, data);
elapsed = g_timer_elapsed (timer, NULL);
if (i == 0)
min_elapsed = elapsed;
else
min_elapsed = MIN (min_elapsed, elapsed);
factor = test_factor;
}
else
{
/* Estimate time for one run by doing a few test rounds. */
for (i = 0; i < ESTIMATE_ROUND_TIME_N_RUNS; i++)
{
test->init (test, data, 1.0);
g_timer_start (timer);
test->run (test, data);
g_timer_stop (timer);
test->finish (test, data);
factor = TARGET_ROUND_TIME / min_elapsed;
elapsed = g_timer_elapsed (timer, NULL);
if (i == 0)
min_elapsed = elapsed;
else
min_elapsed = MIN (min_elapsed, elapsed);
}
factor = TARGET_ROUND_TIME / min_elapsed;
}
if (verbose)
g_print ("Uncorrected round time: %.4f msecs, correction factor %.2f\n", 1000*min_elapsed, factor);
@ -1550,8 +1561,14 @@ main (int argc,
PerformanceTest *test;
GOptionContext *context;
GError *error = NULL;
const char *str;
int i;
if ((str = g_getenv ("GLIB_PERFORMANCE_FACTOR")) && str[0])
{
test_factor = g_strtod (str, NULL);
}
context = g_option_context_new ("GObject performance tests");
g_option_context_add_main_entries (context, cmd_entries, NULL);
if (!g_option_context_parse (context, &argc, &argv, &error))
@ -1560,6 +1577,12 @@ main (int argc,
return 1;
}
if (test_factor < 0)
{
g_printerr ("%s: test factor must be positive\n", argv[0]);
return 1;
}
if (argc > 1)
{
for (i = 1; i < argc; i++)