tests: increase sleep in health_systemd-units test

I have seen the attached log output in a failed test. I am not entirely
sure, but it might be caused by a sleep that is too short under
ASAN/UBSAN. The last lines of the log are not supposed to look like
this: unit4 should not be picked up as active/running right after its
registration; its state should be failed/failed.
2023-07-12 10:45:24.650301 | w | 23.209 [3224 health T] Registered systemd unit watcher for 'unit1.service'
2023-07-12 10:45:24.650306 | w | 23.211 [3224 health D] Systemd unit 'unit1.service' changed state (active running)
2023-07-12 10:45:24.650311 | w | 23.223 [3224 health T] Registered systemd unit watcher for 'unit2.service'
2023-07-12 10:45:24.650322 | w | 23.224 [3224 health D] Systemd unit 'unit2.service' changed state (activating auto-restart)
2023-07-12 10:45:24.650327 | w | 23.234 [3224 health T] Registered systemd unit watcher for 'unit3.service'
2023-07-12 10:45:24.650331 | w | 23.236 [3224 health D] Systemd unit 'unit3.service' changed state (failed failed)
2023-07-12 10:45:24.650335 | w | 23.340 [3224 main E] dataFrom /ietf-alarms:alarms/alarm-inventory
2023-07-12 10:45:24.650339 | w | 23.356 [3253 health D] Systemd unit 'unit2.service' changed state (active running)
2023-07-12 10:45:24.650343 | w | [doctest] doctest version is "2.4.11"
2023-07-12 10:45:24.650347 | w | [doctest] run with "--help" for options
2023-07-12 10:45:24.650352 | w | /home/ci/src/cesnet-gerrit-public/CzechLight/velia/tests/health_systemd-units.cpp:79
2023-07-12 10:45:24.650357 | w | fakeAlarmServer.rpcCalled(alarmRPC, std::map<std::string, std::string>{ {"/sysrepo-ietf-alarms:create-or-update-alarm/alarm-text", "systemd unit state: (active, running)"}, {"/sysrepo-ietf-alarms:create-or-update-alarm/alarm-type-id", "velia-alarms:systemd-unit-failure"}, {"/sysrepo-ietf-alarms:create-or-update-alarm/alarm-type-qualifier", ""}, {"/sysrepo-ietf-alarms:create-or-update-alarm/resource", "unit1.service"}, {"/sysrepo-ietf-alarms:create-or-update-alarm/severity", "cleared"} }) with.
2023-07-12 10:45:24.650362 | w | param _1 == /sysrepo-ietf-alarms:create-or-update-alarm
2023-07-12 10:45:24.650381 | w | param _2 == { { /sysrepo-ietf-alarms:create-or-update-alarm/alarm-text, systemd unit state: (active, running) }, { /sysrepo-ietf-alarms:create-or-update-alarm/alarm-type-id, velia-alarms:systemd-unit-failure }, { /sysrepo-ietf-alarms:create-or-update-alarm/alarm-type-qualifier, }, { /sysrepo-ietf-alarms:create-or-update-alarm/resource, unit1.service }, { /sysrepo-ietf-alarms:create-or-update-alarm/severity, cleared } }
2023-07-12 10:45:24.650395 | w |
2023-07-12 10:45:24.650399 | w | /home/ci/src/cesnet-gerrit-public/CzechLight/velia/tests/health_systemd-units.cpp:81
2023-07-12 10:45:24.650412 | w | fakeAlarmServer.rpcCalled(alarmRPC, std::map<std::string, std::string>{ {"/sysrepo-ietf-alarms:create-or-update-alarm/alarm-text", "systemd unit state: (activating, auto-restart)"}, {"/sysrepo-ietf-alarms:create-or-update-alarm/alarm-type-id", "velia-alarms:systemd-unit-failure"}, {"/sysrepo-ietf-alarms:create-or-update-alarm/alarm-type-qualifier", ""}, {"/sysrepo-ietf-alarms:create-or-update-alarm/resource", "unit2.service"}, {"/sysrepo-ietf-alarms:create-or-update-alarm/severity", "critical"} }) with.
2023-07-12 10:45:24.650422 | w | param _1 == /sysrepo-ietf-alarms:create-or-update-alarm
2023-07-12 10:45:24.650441 | w | param _2 == { { /sysrepo-ietf-alarms:create-or-update-alarm/alarm-text, systemd unit state: (activating, auto-restart) }, { /sysrepo-ietf-alarms:create-or-update-alarm/alarm-type-id, velia-alarms:systemd-unit-failure }, { /sysrepo-ietf-alarms:create-or-update-alarm/alarm-type-qualifier, }, { /sysrepo-ietf-alarms:create-or-update-alarm/resource, unit2.service }, { /sysrepo-ietf-alarms:create-or-update-alarm/severity, critical } }
2023-07-12 10:45:24.650452 | w |
2023-07-12 10:45:24.650455 | w | /home/ci/src/cesnet-gerrit-public/CzechLight/velia/tests/health_systemd-units.cpp:83
2023-07-12 10:45:24.650459 | w | fakeAlarmServer.rpcCalled(alarmRPC, std::map<std::string, std::string>{ {"/sysrepo-ietf-alarms:create-or-update-alarm/alarm-text", "systemd unit state: (failed, failed)"}, {"/sysrepo-ietf-alarms:create-or-update-alarm/alarm-type-id", "velia-alarms:systemd-unit-failure"}, {"/sysrepo-ietf-alarms:create-or-update-alarm/alarm-type-qualifier", ""}, {"/sysrepo-ietf-alarms:create-or-update-alarm/resource", "unit3.service"}, {"/sysrepo-ietf-alarms:create-or-update-alarm/severity", "critical"} }) with.
2023-07-12 10:45:24.650463 | w | param _1 == /sysrepo-ietf-alarms:create-or-update-alarm
2023-07-12 10:45:24.650468 | w | param _2 == { { /sysrepo-ietf-alarms:create-or-update-alarm/alarm-text, systemd unit state: (failed, failed) }, { /sysrepo-ietf-alarms:create-or-update-alarm/alarm-type-id, velia-alarms:systemd-unit-failure }, { /sysrepo-ietf-alarms:create-or-update-alarm/alarm-type-qualifier, }, { /sysrepo-ietf-alarms:create-or-update-alarm/resource, unit3.service }, { /sysrepo-ietf-alarms:create-or-update-alarm/severity, critical } }
2023-07-12 10:45:24.650471 | w |
2023-07-12 10:45:24.650475 | w | /home/ci/src/cesnet-gerrit-public/CzechLight/velia/tests/health_systemd-units.cpp:103
2023-07-12 10:45:24.650483 | w | fakeAlarmServer.rpcCalled(alarmRPC, std::map<std::string, std::string>{ {"/sysrepo-ietf-alarms:create-or-update-alarm/alarm-text", "systemd unit state: (active, running)"}, {"/sysrepo-ietf-alarms:create-or-update-alarm/alarm-type-id", "velia-alarms:systemd-unit-failure"}, {"/sysrepo-ietf-alarms:create-or-update-alarm/alarm-type-qualifier", ""}, {"/sysrepo-ietf-alarms:create-or-update-alarm/resource", "unit2.service"}, {"/sysrepo-ietf-alarms:create-or-update-alarm/severity", "cleared"} }) with.
2023-07-12 10:45:24.650487 | w | param _1 == /sysrepo-ietf-alarms:create-or-update-alarm
2023-07-12 10:45:24.650492 | w | param _2 == { { /sysrepo-ietf-alarms:create-or-update-alarm/alarm-text, systemd unit state: (active, running) }, { /sysrepo-ietf-alarms:cr23.365 [3253 health D] Systemd unit 'unit3.service' changed state (active running)
2023-07-12 10:45:24.650513 | w | 23.387 [3253 health T] Systemd unit 'unit3.service' changed state but it is the same state as before (active, running)
2023-07-12 10:45:24.650517 | w | 23.391 [3253 health T] Registered systemd unit watcher for 'unit4.service'
2023-07-12 10:45:24.650521 | w | 23.406 [3253 health D] Systemd unit 'unit4.service' changed state (active running)
2023-07-12 10:45:24.650533 | w | terminate called after throwing an instance of 'doctest::detail::TestFailureException'
Change-Id: I393ff4a935ce6929bca10795b615da8f87217cff
diff --git a/tests/health_systemd-units.cpp b/tests/health_systemd-units.cpp
index abd38f5..d4e9974 100644
--- a/tests/health_systemd-units.cpp
+++ b/tests/health_systemd-units.cpp
@@ -121,7 +121,7 @@
     server.createUnit(*serverConnection, "unit4.service", "/org/freedesktop/systemd1/unit/unit4", "failed", "failed");
     // Sleep for a while; the rest of the code might be too fast and we need to be sure that we pick up event for (failed, failed) before the state of unit4 is changed in the DBus server
-    std::this_thread::sleep_for(25ms);
+    std::this_thread::sleep_for(250ms);
     server.changeUnitState("/org/freedesktop/systemd1/unit/unit3", "activating", "auto-restart");
     server.changeUnitState("/org/freedesktop/systemd1/unit/unit3", "active", "running");
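
As a side note (not part of this change), the fixed sleep could in
principle be replaced by a condition-based wait so the test does not
depend on timing at all. A minimal sketch, assuming a hypothetical
waitUntil() helper that does not exist in the test suite:

    // Hypothetical helper (illustration only): poll a predicate until it
    // holds or a timeout expires, instead of sleeping for a fixed period.
    #include <chrono>
    #include <functional>
    #include <thread>

    bool waitUntil(const std::function<bool()>& predicate,
                   std::chrono::milliseconds timeout = std::chrono::milliseconds{2000},
                   std::chrono::milliseconds step = std::chrono::milliseconds{10})
    {
        const auto deadline = std::chrono::steady_clock::now() + timeout;
        while (std::chrono::steady_clock::now() < deadline) {
            if (predicate()) {
                return true;
            }
            std::this_thread::sleep_for(step);
        }
        return predicate(); // one last check after the deadline
    }

The test could then wait until the watcher has actually reported the
(failed, failed) state for unit4 before changing any further unit
states, e.g. waitUntil([&]{ return seenUnit4Failed; }), where
seenUnit4Failed would have to be exposed by the test fixture
(hypothetical). This change keeps the simpler fixed sleep and only
makes it longer.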