# SPDX-License-Identifier: LGPL-2.1-or-later

if install_tests
        # Install the static test fixture directories verbatim under
        # testdata_dir, dropping git bookkeeping files.
        foreach subdir : [
                'auxv',
                'journal-data',
                'units',
                'test-execute',
                'test-fstab-generator',
                'test-path',
                'test-path-util',
                'test-umount',
                'test-network',
                'test-network-generator-conversion',
                'testsuite-03.units',
                'testsuite-04.units',
                'testsuite-06.units',
                'testsuite-07.units',
                'testsuite-10.units',
                'testsuite-11.units',
                'testsuite-16.units',
                'testsuite-28.units',
                'testsuite-30.units',
                'testsuite-52.units',
                'testsuite-63.units',
                'testsuite-80.units',
        ]
                install_subdir(subdir,
                               exclude_files : '.gitattributes',
                               install_dir : testdata_dir)
        endforeach

        install_data(kbd_model_map,
                     install_dir : testdata_dir + '/test-keymap-util')

        # BCD test fixtures are only useful when the zstd-decompressing
        # BCD parser is built, which is gated on an EFI architecture.
        if conf.get('HAVE_ZSTD') == 1 and efi_arch != ''
                install_subdir('test-bcd',
                               exclude_files : '.gitattributes',
                               install_dir : testdata_dir)
        endif

        if conf.get('ENABLE_RESOLVE') == 1
                install_subdir('test-resolve',
                               exclude_files : '.gitattributes',
                               install_dir : testdata_dir)
        endif

        install_data('create-busybox-container',
                     install_mode : 'rwxr-xr-x',
                     install_dir : testdata_dir)

        # The unit tests implemented as shell scripts expect to find testdata/
        # in the directory where they are stored.
        meson.add_install_script(meson_make_symlink,
                                 testdata_dir,
                                 unittestsdir / 'testdata')
endif
# Locate the shell/python test drivers used by test registrations below
# and elsewhere in the build.
test_bootctl_json_sh = find_program('test-bootctl-json.sh')
test_fstab_generator_sh = find_program('test-fstab-generator.sh')
test_network_generator_conversion_sh = find_program('test-network-generator-conversion.sh')
test_systemctl_enable_sh = find_program('test-systemctl-enable.sh')
test_systemd_tmpfiles_py = find_program('test-systemd-tmpfiles.py')
hwdb_test_sh = find_program('hwdb-test.sh')

############################################################
# test-sysusers.sh needs configuration values substituted in, hence
# configure_file() rather than a plain files()/find_program() lookup.
test_sysusers_sh = configure_file(
        input : 'test-sysusers.sh.in',
        output : 'test-sysusers.sh',
        configuration : conf)
if install_tests and conf.get('ENABLE_SYSUSERS') == 1
        install_data(test_sysusers_sh,
                     install_dir : unittestsdir)
        install_subdir('test-sysusers',
                       exclude_files : '.gitattributes',
                       install_dir : testdata_dir)
endif

############################################################
test_compare_versions_sh = files('test-compare-versions.sh')
if install_tests
        install_data(test_compare_versions_sh,
                     install_dir : unittestsdir)
endif

############################################################
# Lint all installed udev rules files; part of the 'dist-check' suite so
# it runs in distribution checks rather than the default test set.
rule_syntax_check_py = find_program('rule-syntax-check.py')
if want_tests != 'false'
        test('rule-syntax-check',
             rule_syntax_check_py,
             suite : 'dist-check',
             args : all_rules)
endif

############################################################
if install_tests
        # Runner for the installed tests: each test exits 0 on success,
        # 77 for "skipped", anything else is a failure.
        install_data('run-unit-tests.py',
                     install_mode : 'rwxr-xr-x',
                     install_dir : testsdir)

        install_data('test-fstab-generator.sh',
                     install_mode : 'rwxr-xr-x',
                     install_dir : unittestsdir)

        install_data('test-network-generator-conversion.sh',
                     install_mode : 'rwxr-xr-x',
                     install_dir : unittestsdir)
endif

############################################################
# Python rewrite of the udev test suite (formerly udev-test.pl).
sys_script_py = files('sys-script.py')
test_udev_py = files('test-udev.py')

if install_tests
        install_data(
                sys_script_py,
                test_udev_py,
                install_dir : unittestsdir)
endif

############################################################
# The rpm macro check can only run where the rpm tooling is present;
# both lookups are non-fatal so the build still configures without them.
rpm = find_program('rpm', required : false)
rpmspec = find_program('rpmspec', required : false)
test_rpm_macros = find_program('test-rpm-macros.sh')

if rpm.found() and rpmspec.found()
        if want_tests != 'false'
                test('test-rpm-macros',
                     test_rpm_macros,
                     suite : 'dist-check',
                     args : [project_build_root])
        endif
else
        message('Skipping test-rpm-macros since rpm and/or rpmspec are not available')
endif

############################################################
# One test per dmidecode dump: feed the .bin through dmi_memory_id and
# compare against the expected .txt. Only meaningful on DMI-capable
# architectures.
if want_tests != 'false' and dmi_arches.contains(host_machine.cpu_family())
        udev_dmi_memory_id_test = find_program('udev-dmi-memory-id-test.sh')

        # Prefer git ls-files so freshly-added dumps are picked up even
        # before a reconfigure; fall back to shell globbing for tarballs.
        if git.found() and fs.is_dir(project_source_root / '.git')
                out = run_command(
                        env, '-u', 'GIT_WORK_TREE',
                        git, '--git-dir=@0@/.git'.format(project_source_root),
                        'ls-files', ':/test/dmidecode-dumps/*.bin',
                        check: true)
        else
                out = run_command(
                        sh, '-c', 'cd "$1"; echo test/dmidecode-dumps/*.bin', '_', project_source_root,
                        check: true)
        endif

        foreach p : out.stdout().split()
                source = project_source_root / p
                # e.g. test/dmidecode-dumps/foo.bin -> dmidecode_foo
                name = 'dmidecode_' + p.split('/')[-1].split('.')[0]

                test(name,
                     udev_dmi_memory_id_test,
                     suite : 'dist-check',
                     args : [udev_prog_paths['dmi_memory_id'].full_path(),
                             source,
                             source + '.txt'],
                     depends : udev_prog_paths['dmi_memory_id'])
        endforeach
endif

subdir('fuzz')