# SPDX-License-Identifier: LGPL-2.1-or-later

if conf.get('ENABLE_SYSUSERS') == 1
        test_sysusers_sh = configure_file(
                input : 'test-sysusers.sh.in',
                output : 'test-sysusers.sh',
                configuration : conf)

        if want_tests != 'false'
                exe = executables_by_name.get('systemd-sysusers')
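                # The test script takes the path to the built binary as an argument; because only a
                # string path is passed (see the meson issue referenced below), the build dependency
                # on the binary is declared explicitly with 'depends'.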
                test('test-sysusers',
                     test_sysusers_sh,
                     # https://github.com/mesonbuild/meson/issues/2681
                     args : exe.full_path(),
                     depends : exe,
                     suite : 'sysusers')

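                # If standalone variants of the binaries are built, run the same script against the
                # standalone build as well.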
                if have_standalone_binaries
                        exe = executables_by_name.get('systemd-sysusers.standalone')
                        test('test-sysusers.standalone',
                             test_sysusers_sh,
                             # https://github.com/mesonbuild/meson/issues/2681
                             args : exe.full_path(),
                             depends : exe,
                             suite : 'sysusers')
                endif
        endif

        if install_tests
                install_data(test_sysusers_sh,
                             install_dir : unittestsdir)
                install_subdir('test-sysusers',
                               exclude_files : '.gitattributes',
                               install_dir : testdata_dir)
        endif
endif

############################################################

if want_tests != 'false' and conf.get('ENABLE_HWDB') == 1
        hwdb_test_sh = find_program('hwdb-test.sh')
        exe = executables_by_name.get('systemd-hwdb')
        test('hwdb-test',
             hwdb_test_sh,
             suite : 'dist',
             args : exe.full_path(),
             depends : exe,
             timeout : 90)
endif

############################################################

if want_tests != 'false'
        test_systemctl_enable_sh = find_program('test-systemctl-enable.sh')
        systemctl = executables_by_name.get('systemctl')
        systemd_id128 = executables_by_name.get('systemd-id128')
        test('test-systemctl-enable',
             test_systemctl_enable_sh,
             # https://github.com/mesonbuild/meson/issues/2681
             args : [systemctl.full_path(),
                     systemd_id128.full_path()],
             depends : [systemctl, systemd_id128],
             suite : 'systemctl')
endif

############################################################

if want_tests != 'false' and conf.get('HAVE_SYSV_COMPAT') == 1
        sysv_generator_test_py = find_program('sysv-generator-test.py')
        exe = executables_by_name.get('systemd-sysv-generator')
        test('sysv-generator-test',
             sysv_generator_test_py,
             depends : exe,
             suite : 'sysv')
endif

############################################################

if want_tests != 'false' and conf.get('HAVE_BLKID') == 1
        test_bootctl_json_sh = find_program('test-bootctl-json.sh')
        exe = executables_by_name.get('bootctl')
        test('test-bootctl-json',
             test_bootctl_json_sh,
             args : exe.full_path(),
             depends : exe,
             suite : 'boot')
endif

############################################################

if want_tests != 'false' and conf.get('ENABLE_TMPFILES') == 1
        test_systemd_tmpfiles_py = find_program('test-systemd-tmpfiles.py')
        exe = executables_by_name.get('systemd-tmpfiles')
        test('test-systemd-tmpfiles',
             test_systemd_tmpfiles_py,
             args : exe.full_path(),
             depends : exe,
             suite : 'tmpfiles')

        if have_standalone_binaries
                exe = executables_by_name.get('systemd-tmpfiles.standalone')
                test('test-systemd-tmpfiles.standalone',
                     test_systemd_tmpfiles_py,
                     args : exe.full_path(),
                     depends : exe,
                     suite : 'tmpfiles')
        endif
endif

############################################################

test_compare_versions_sh = files('test-compare-versions.sh')
if want_tests != 'false'
        exe = executables_by_name.get('systemd-analyze')
        test('test-compare-versions',
             test_compare_versions_sh,
             args : exe.full_path(),
             depends : exe,
             suite : 'test')
endif
if install_tests
        install_data(test_compare_versions_sh,
                     install_dir : unittestsdir)
endif

############################################################

rule_syntax_check_py = find_program('rule-syntax-check.py')
if want_tests != 'false'
        test('rule-syntax-check',
             rule_syntax_check_py,
             suite : 'dist',
             args : all_rules)

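        # 'udevadm verify' checks the same rules files with udev's own parser, complementing
        # rule-syntax-check.py above; --resolve-names=never avoids resolving user/group names on
        # the build host.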
        exe = executables_by_name.get('udevadm')
        test('udev-rules-check',
             exe,
             suite : 'udev',
             args : ['verify', '--resolve-names=never', all_rules])
endif

############################################################

if install_tests
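        # run-unit-tests.py is the runner for the installed tests: exit code 0 means success,
        # 77 means the test was skipped, anything else is a failure.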
        install_data('run-unit-tests.py',
                     install_mode : 'rwxr-xr-x',
                     install_dir : testsdir)

        install_data('integration-test-setup.sh',
                     install_mode : 'rwxr-xr-x',
                     install_dir : testdata_dir)
endif

############################################################

test_fstab_generator_sh = find_program('test-fstab-generator.sh')
if want_tests != 'false'
        exe = executables_by_name.get('systemd-fstab-generator')
        test('test-fstab-generator',
             test_fstab_generator_sh,
             # https://github.com/mesonbuild/meson/issues/2681
             args : exe.full_path(),
             env : test_env,
             depends : exe,
             suite : 'fstab')
endif
if install_tests
        install_data('test-fstab-generator.sh',
                     install_mode : 'rwxr-xr-x',
                     install_dir : unittestsdir)
endif

############################################################

test_network_generator_conversion_sh = find_program('test-network-generator-conversion.sh')
if want_tests != 'false'
        exe = executables_by_name.get('systemd-network-generator')
        test('test-network-generator-conversion',
             test_network_generator_conversion_sh,
             # https://github.com/mesonbuild/meson/issues/2681
             args : exe.full_path(),
             depends : exe,
             suite : 'network')
endif
if install_tests
        install_data('test-network-generator-conversion.sh',
                     install_mode : 'rwxr-xr-x',
                     install_dir : unittestsdir)
endif

############################################################

sys_script_py = files('sys-script.py')
test_udev_py = files('test-udev.py')
if want_tests != 'false'
        exe = executables_by_name.get('test-udev-rule-runner')
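        # test-udev.py is pointed at the test-udev-rule-runner helper through the UDEV_RULE_RUNNER
        # environment variable rather than through a command line argument.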
        test('test-udev',
             test_udev_py,
             args : ['-v'],
             env : ['UDEV_RULE_RUNNER=' + exe.full_path()],
             depends : exe,
             timeout : 180,
             suite : 'udev')
endif
if install_tests
        install_data(
                sys_script_py,
                test_udev_py,
                install_dir : unittestsdir)
endif

############################################################

rpm = find_program('rpm', required : false)
rpmspec = find_program('rpmspec', required : false)
test_rpm_macros = find_program('test-rpm-macros.sh')

if rpm.found() and rpmspec.found()
        if want_tests != 'false'
                test('test-rpm-macros',
                     test_rpm_macros,
                     suite : 'dist',
                     args : [project_build_root])
        endif
else
        message('Skipping test-rpm-macros since rpm and/or rpmspec are not available')
endif

############################################################

if want_tests != 'false' and conf.get('HAVE_DMI') == 1
        udev_dmi_memory_id_test = find_program('udev-dmi-memory-id-test.sh')
        exe = executables_by_name.get('dmi_memory_id')

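        # Enumerate the dmidecode dump files to test against: via 'git ls-files' when building
        # from a git checkout, via a plain shell glob otherwise.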
        if git.found() and fs.is_dir(project_source_root / '.git')
                out = run_command(
                        env, '-u', 'GIT_WORK_TREE',
                        git, '--git-dir=@0@/.git'.format(project_source_root),
                        'ls-files', ':/test/dmidecode-dumps/*.bin',
                        check: true)
        else
                out = run_command(
                        sh, '-c', 'cd "$1"; echo test/dmidecode-dumps/*.bin', '_', project_source_root,
                        check: true)
        endif

        foreach p : out.stdout().split()
                source = project_source_root / p
                test('dmidecode_' + fs.stem(p),
                     udev_dmi_memory_id_test,
                     suite : 'udev',
                     args : [exe.full_path(),
                             source,
                             source + '.txt'],
                     depends : exe)
        endforeach
endif

############################################################

if want_tests != 'false' and conf.get('ENABLE_KERNEL_INSTALL') == 1
        kernel_install = executables_by_name.get('kernel-install')
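        # The test script receives the kernel-install binary and the install plugins to exercise;
        # the ukify pieces are appended only when ukify and a boot stub are available.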
        args = [kernel_install.full_path(), loaderentry_install.full_path(), uki_copy_install]
        deps = [kernel_install, loaderentry_install]
        if want_ukify and boot_stubs.length() > 0
                args += [ukify.full_path(), ukify_install.full_path(), boot_stubs[0]]
                deps += [ukify, ukify_install, boot_stubs[0]]
        endif

        test('test-kernel-install',
             test_kernel_install_sh,
             env : test_env,
             args : args,
             depends : deps,
             suite : 'kernel-install')
endif

############################################################

integration_test_wrapper = find_program('integration-test-wrapper.py')
integration_tests = []
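# Default settings for one integration test description; the TEST-* subdirectories below append
# their entries to integration_tests, overriding individual keys where needed.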
integration_test_template = {
        'mkosi-args' : [],
        'timeout' : 1800,
        'storage' : 'volatile',
        'priority' : 0,
        'firmware' : 'linux',
        'enabled' : true,
        'configuration' : {
                'memory-accounting' : 'no',
                'command' : testdata_dir / 'units/%N.sh',
                'wants' : 'multi-user.target user@4711.service',
                'after' : 'user@4711.service',
        },
        'cmdline' : [],
        'credentials' : [],
        'qemu-args' : [],
        'exit-code' : 123,
        'vm' : false,
}
testdata_subdirs = [
        'auxv',
        'journal-data',
        'knot-data',
        'test-journals',
        'units',
        'test-execute',
        'test-fstab-generator',
        'test-path',
        'test-path-util',
        'test-umount',
        'test-network',
        'test-network-generator-conversion',
]

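# Each listed subdirectory has its own meson.build that adds the corresponding test description
# to integration_tests.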
foreach dirname : [
        'TEST-01-BASIC',
        'TEST-02-UNITTESTS',
        'TEST-03-JOBS',
        'TEST-04-JOURNAL',
        'TEST-05-RLIMITS',
        'TEST-06-SELINUX',
        'TEST-07-PID1',
        'TEST-08-INITRD',
        'TEST-09-REBOOT',
        'TEST-13-NSPAWN',
        'TEST-15-DROPIN',
        'TEST-16-EXTEND-TIMEOUT',
        'TEST-17-UDEV',
        'TEST-18-FAILUREACTION',
        'TEST-19-CGROUP',
        'TEST-21-DFUZZER',
        'TEST-22-TMPFILES',
        'TEST-23-UNIT-FILE',
        'TEST-24-CRYPTSETUP',
        'TEST-25-IMPORT',
        'TEST-26-SYSTEMCTL',
        'TEST-29-PORTABLE',
        'TEST-30-ONCLOCKCHANGE',
        'TEST-31-DEVICE-ENUMERATION',
        'TEST-32-OOMPOLICY',
        'TEST-34-DYNAMICUSERMIGRATE',
        'TEST-35-LOGIN',
        'TEST-36-NUMAPOLICY',
        'TEST-38-FREEZER',
        'TEST-43-PRIVATEUSER-UNPRIV',
        'TEST-44-LOG-NAMESPACE',
        'TEST-45-TIMEDATE',
        'TEST-46-HOMED',
        'TEST-50-DISSECT',
        'TEST-52-HONORFIRSTSHUTDOWN',
        'TEST-53-ISSUE-16347',
        'TEST-54-CREDS',
        'TEST-55-OOMD',
        'TEST-58-REPART',
        'TEST-59-RELOADING-RESTART',
        'TEST-60-MOUNT-RATELIMIT',
        'TEST-62-RESTRICT-IFACES',
        'TEST-63-PATH',
        'TEST-64-UDEV-STORAGE',
        'TEST-65-ANALYZE',
        'TEST-66-DEVICE-ISOLATION',
        'TEST-67-INTEGRITY',
        'TEST-68-PROPAGATE-EXIT-STATUS',
        'TEST-69-SHUTDOWN',
        'TEST-70-TPM2',
        'TEST-71-HOSTNAME',
        'TEST-72-SYSUPDATE',
        'TEST-73-LOCALE',
        'TEST-74-AUX-UTILS',
        'TEST-75-RESOLVED',
        'TEST-76-SYSCTL',
        'TEST-78-SIGQUEUE',
        'TEST-79-MEMPRESS',
        'TEST-80-NOTIFYACCESS',
        'TEST-81-GENERATORS',
        'TEST-82-SOFTREBOOT',
        'TEST-83-BTRFS',
        'TEST-84-STORAGETM',
        'TEST-85-NETWORK',
        'TEST-86-MULTI-PROFILE-UKI',
]
        subdir(dirname)
endforeach

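# Register one meson test per collected description, translating it into command line arguments
# for integration-test-wrapper.py. Assuming mkosi is available and the image has been built,
# a single test can then be run with something like:
#   meson test -C build --suite integration-tests TEST-01-BASIC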
foreach integration_test : integration_tests
        integration_test_args = [
                '--meson-source-dir', meson.project_source_root(),
                '--meson-build-dir', meson.project_build_root(),
                '--name', integration_test['name'],
                '--storage', integration_test['storage'],
                '--firmware', integration_test['firmware'],
                '--exit-code', integration_test['exit-code'].to_string(),
        ]

        if 'unit' in integration_test
                integration_test_unit = integration_test['unit']
        else
                integration_test_unit = configure_file(
                        input : 'test.service.in',
                        output : '@0@.service'.format(integration_test['name']),
                        configuration : integration_test['configuration'],
                )
        endif

        integration_test_args += ['--unit', fs.name(integration_test_unit)]
        if install_tests
                install_data(integration_test_unit, install_dir : testdata_dir / 'units')
        endif

        if integration_test['vm']
                integration_test_args += ['--vm']
        endif

        if not mkosi.found()
                continue
        endif

        integration_test_args += ['--mkosi', mkosi.full_path(), '--']

        if integration_test['cmdline'].length() > 0
                integration_test_args += [
                        '--kernel-command-line-extra=@0@'.format(' '.join(integration_test['cmdline']))
                ]
        endif

        foreach credential : integration_test['credentials']
                integration_test_args += ['--credential', credential]
        endforeach

        if integration_test['qemu-args'].length() > 0
                integration_test_args += ['--qemu-args=@0@'.format(' '.join(integration_test['qemu-args']))]
        endif

        integration_test_args += integration_test['mkosi-args']

        integration_test_env = {}

        if want_integration_tests
                integration_test_env += {'SYSTEMD_INTEGRATION_TESTS': '1'}
        endif

        if not integration_test['enabled']
                continue
        endif

        # We don't explicitly depend on the "mkosi" target because that means the image is rebuilt on every
        # "ninja -C build". Instead, the mkosi target has to be rebuilt manually before running the
        # integration tests with mkosi.
        test(
                integration_test['name'],
                integration_test_wrapper,
                env : integration_test_env,
                args : integration_test_args,
                timeout : integration_test['timeout'],
                priority : integration_test['priority'],
                suite : 'integration-tests',
        )
endforeach

if install_tests
        foreach subdir : testdata_subdirs
                # install_subdir() before meson 1.3.0 does not handle symlinks correctly (it follows them
                # instead of copying the symlink) so we use rsync instead.
                if meson.version().version_compare('<1.3.0')
                        if not rsync.found()
                                error('rsync is required to install the integration test data')
                        endif

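                        # rsync -rlpt recurses, copies symlinks as symlinks, and preserves
                        # permissions and timestamps.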
                        rsync_r = rsync.full_path() + ' -rlpt --exclude .gitattributes --exclude 25-default.link -- "@0@" "${DESTDIR:-}@1@"'
                        meson.add_install_script(sh, '-c',
                                                 rsync_r.format(meson.current_source_dir() / subdir, testdata_dir))
                else
                        install_subdir(subdir,
                                       exclude_files : ['.gitattributes', '25-default.link'],
                                       install_dir : testdata_dir,
                                       follow_symlinks : false)
                endif
        endforeach

        # test-network/conf/25-default.link is a local symlink that becomes dangling when installed, so we
        # exclude it and create the correct symlink here.
        meson.add_install_script(sh, '-c', ln_s.format(networkdir / '99-default.link',
                                                       testdata_dir / 'test-network/conf/25-default.link'))

        install_data(kbd_model_map,
                     install_dir : testdata_dir + '/test-keymap-util')

        if conf.get('HAVE_ZSTD') == 1 and efi_arch != ''
                install_subdir('test-bcd',
                               exclude_files : '.gitattributes',
                               install_dir : testdata_dir)
        endif
        if conf.get('ENABLE_RESOLVE') == 1
                install_subdir('test-resolve',
                               exclude_files : '.gitattributes',
                               install_dir : testdata_dir)
        endif

        # The unit tests implemented as shell scripts expect to find testdata/
        # in the directory where they are stored.
        meson.add_install_script(sh, '-c', ln_s.format(testdata_dir,
                                                       unittestsdir / 'testdata'))
endif