# STDOUT: ---v---v---v---v---v---
ansible-playbook [core 2.14.2]
  config file = /etc/ansible/ansible.cfg
  configured module search path = ['/home/jenkins/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /opt/ansible-2.14/lib/python3.9/site-packages/ansible
  ansible collection location = /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection
  executable location = /opt/ansible-2.14/bin/ansible-playbook
  python version = 3.9.13 (main, Jun 24 2022, 15:32:51) [GCC 8.5.0 20210514 (Red Hat 8.5.0-13)] (/opt/ansible-2.14/bin/python3.9)
  jinja version = 3.1.2
  libyaml = True
Using /etc/ansible/ansible.cfg as config file
Skipping callback 'debug', as we already have a stdout callback.
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.

PLAYBOOK: tests_qnetd_disabled.yml *********************************************
2 plays in /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/tests/tests_qnetd_disabled.yml

PLAY [all] *********************************************************************

TASK [Include vault variables] *************************************************
task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/tests/tests_qnetd_disabled.yml:4
Saturday 11 February 2023 20:42:30 +0000 (0:00:00.605) 0:00:00.605 *****
ok: [sut] => { "ansible_facts": { "ha_cluster_hacluster_password": { "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n31303833633366333561656439323930303361333161363239346166656537323933313436\n3432386236656563343237306335323637396239616230353561330a313731623238393238\n62343064666336643930663239383936616465643134646536656532323461356237646133\n3761616633323839633232353637366266350a313163633236376666653238633435306565\n3264623032333736393535663833\n" } }, "ansible_included_var_files": [ "/WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/tests/vars/vault-variables.yml" ], "changed": false }

PLAY [Test qnetd setup with qnetd not starting on boot] ************************

TASK [Gathering Facts] *********************************************************
task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/tests/tests_qnetd_disabled.yml:9
Saturday 11 February 2023 20:42:31 +0000 (0:00:01.123) 0:00:01.729 *****
ok: [sut]

TASK [Set up test environment] *************************************************
task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/tests/tests_qnetd_disabled.yml:21
Saturday 11 February 2023 20:42:49 +0000 (0:00:17.946) 0:00:19.676 *****

TASK [fedora.linux_system_roles.ha_cluster : Set node name to 'localhost' for single-node clusters] ***
task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_setup.yml:9
Saturday 11 February 2023 20:42:51 +0000 (0:00:01.865) 0:00:21.541 *****
ok: [sut] => { "ansible_facts": { "inventory_hostname": "localhost" }, "changed": false }

TASK [fedora.linux_system_roles.ha_cluster : Ensure facts used by tests] *******
task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_setup.yml:14
Saturday 11 February 2023 20:42:53 +0000 (0:00:01.790) 0:00:23.331 *****
skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.ha_cluster : Do not try to enable RHEL repositories] ***
task path:
/WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_setup.yml:20 Saturday 11 February 2023 20:42:54 +0000 (0:00:01.023) 0:00:24.354 ***** ok: [sut] => { "ansible_facts": { "ha_cluster_enable_repos": false }, "changed": false } TASK [Clean up test environment for qnetd / qdevice] *************************** task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/tests/tests_qnetd_disabled.yml:26 Saturday 11 February 2023 20:42:55 +0000 (0:00:01.306) 0:00:25.661 ***** TASK [fedora.linux_system_roles.ha_cluster : Make sure qnetd is not installed] *** task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_cleanup_qnetd.yml:9 Saturday 11 February 2023 20:42:57 +0000 (0:00:01.305) 0:00:26.966 ***** ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.ha_cluster : Make sure qnetd config files are not present] *** task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_cleanup_qnetd.yml:15 Saturday 11 February 2023 20:43:05 +0000 (0:00:08.417) 0:00:35.383 ***** ok: [sut] => { "changed": false, "path": "/etc/corosync/qnetd", "state": "absent" } TASK [Run HA Cluster role] ***************************************************** task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/tests/tests_qnetd_disabled.yml:31 Saturday 11 February 2023 20:43:15 +0000 (0:00:09.757) 0:00:45.141 ***** TASK [fedora.linux_system_roles.ha_cluster : Set platform/version specific variables] *** task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:3 Saturday 11 February 2023 20:43:20 +0000 (0:00:05.615) 0:00:50.756 ***** included: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/set_vars.yml for sut TASK [fedora.linux_system_roles.ha_cluster : Ensure ansible_facts used by role] *** task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/set_vars.yml:2 Saturday 11 February 2023 20:43:23 +0000 (0:00:02.310) 0:00:53.067 ***** ok: [sut] TASK [fedora.linux_system_roles.ha_cluster : Set platform/version specific variables] *** task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/set_vars.yml:7 Saturday 11 February 2023 20:43:27 +0000 (0:00:04.637) 0:00:57.704 ***** skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=RedHat_9.yml) => { "ansible_facts": { "__ha_cluster_repos": [ { "id": "rhel-9-for-{{ ansible_architecture }}-highavailability-rpms", "name": "High Availability" } ] }, "ansible_included_var_files": [ "/WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/vars/RedHat_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat_9.yml" } skipping: [sut] => (item=RedHat_9.2.yml) => { "ansible_loop_var": 
"item", "changed": false, "item": "RedHat_9.2.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Check and prepare role variables] *** task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:6 Saturday 11 February 2023 20:43:31 +0000 (0:00:03.979) 0:01:01.684 ***** included: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/check-and-prepare-role-variables.yml for sut TASK [fedora.linux_system_roles.ha_cluster : Fail if passwords are not specified] *** task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/check-and-prepare-role-variables.yml:5 Saturday 11 February 2023 20:43:34 +0000 (0:00:03.201) 0:01:04.885 ***** skipping: [sut] => (item=ha_cluster_hacluster_password) => { "ansible_loop_var": "item", "changed": false, "item": "ha_cluster_hacluster_password", "skip_reason": "Conditional result was False" } skipping: [sut] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.ha_cluster : Fail if nodes do not have the same number of SBD devices specified] *** task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/check-and-prepare-role-variables.yml:15 Saturday 11 February 2023 20:43:41 +0000 (0:00:06.107) 0:01:10.993 ***** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Fail if configuring qnetd on a cluster node] *** task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/check-and-prepare-role-variables.yml:30 Saturday 11 February 2023 20:43:41 +0000 (0:00:00.712) 0:01:11.706 ***** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Discover cluster node names] ****** task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/check-and-prepare-role-variables.yml:40 Saturday 11 February 2023 20:43:42 +0000 (0:00:00.905) 0:01:12.611 ***** ok: [sut] => { "ansible_facts": { "__ha_cluster_node_name": "localhost" }, "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Collect cluster node names] ******* task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/check-and-prepare-role-variables.yml:44 Saturday 11 February 2023 20:43:45 +0000 (0:00:02.941) 0:01:15.553 ***** ok: [sut] => { "ansible_facts": { "__ha_cluster_all_node_names": [ "localhost" ] }, "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Extract qdevice settings] ********* task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/check-and-prepare-role-variables.yml:48 Saturday 11 February 2023 20:43:48 +0000 (0:00:02.547) 0:01:18.100 ***** ok: [sut] => { "ansible_facts": { "__ha_cluster_qdevice_host": "", "__ha_cluster_qdevice_in_use": false, "__ha_cluster_qdevice_model": "", "__ha_cluster_qdevice_pcs_address": "" }, "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Figure out if ATB needs to be 
enabled for SBD] *** task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/check-and-prepare-role-variables.yml:57 Saturday 11 February 2023 20:43:57 +0000 (0:00:09.522) 0:01:27.622 ***** ok: [sut] => { "ansible_facts": { "__ha_cluster_sbd_needs_atb": false }, "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Fail if SBD needs ATB enabled and the user configured ATB to be disabled] *** task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/check-and-prepare-role-variables.yml:67 Saturday 11 February 2023 20:44:02 +0000 (0:00:04.945) 0:01:32.567 ***** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Fail if ha_cluster_pcsd_public_key_src and ha_cluster_pcsd_private_key_src are set along with ha_cluster_pcsd_certificates] *** task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/check-and-prepare-role-variables.yml:74 Saturday 11 February 2023 20:44:05 +0000 (0:00:03.119) 0:01:35.687 ***** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Enable package repositories] ****** task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:9 Saturday 11 February 2023 20:44:07 +0000 (0:00:02.080) 0:01:37.767 ***** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Install role essential packages] *** task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:14 Saturday 11 February 2023 20:44:09 +0000 (0:00:01.514) 0:01:39.282 ***** changed: [sut] => { "changed": true, "rc": 0, "results": [ "Installed: libknet1-1.25-2.el9.x86_64", "Installed: libknet1-compress-bzip2-plugin-1.25-2.el9.x86_64", "Installed: libknet1-compress-lz4-plugin-1.25-2.el9.x86_64", "Installed: libknet1-compress-lzma-plugin-1.25-2.el9.x86_64", "Installed: libknet1-compress-lzo2-plugin-1.25-2.el9.x86_64", "Installed: corosynclib-3.1.7-1.el9.x86_64", "Installed: libknet1-compress-plugins-all-1.25-2.el9.x86_64", "Installed: libknet1-compress-zlib-plugin-1.25-2.el9.x86_64", "Installed: libknet1-compress-zstd-plugin-1.25-2.el9.x86_64", "Installed: libknet1-crypto-nss-plugin-1.25-2.el9.x86_64", "Installed: libknet1-crypto-openssl-plugin-1.25-2.el9.x86_64", "Installed: libknet1-crypto-plugins-all-1.25-2.el9.x86_64", "Installed: libknet1-plugins-all-1.25-2.el9.x86_64", "Installed: libnozzle1-1.25-2.el9.x86_64", "Installed: ruby-3.0.4-160.el9_0.x86_64", "Installed: pacemaker-2.1.5-5.el9.x86_64", "Installed: pacemaker-cli-2.1.5-5.el9.x86_64", "Installed: pacemaker-cluster-libs-2.1.5-5.el9.x86_64", "Installed: nspr-4.34.0-14.el9_0.x86_64", "Installed: ruby-default-gems-3.0.4-160.el9_0.noarch", "Installed: pacemaker-libs-2.1.5-5.el9.x86_64", "Installed: nss-3.79.0-14.el9_0.x86_64", "Installed: device-mapper-event-9:1.02.187-5.el9.x86_64", "Installed: device-mapper-event-libs-9:1.02.187-5.el9.x86_64", "Installed: ruby-libs-3.0.4-160.el9_0.x86_64", "Installed: pacemaker-schemas-2.1.5-5.el9.noarch", "Installed: libqb-2.0.6-2.el9.x86_64", "Installed: 
nss-softokn-3.79.0-14.el9_0.x86_64", "Installed: rubygem-bigdecimal-3.0.0-160.el9_0.x86_64", "Installed: device-mapper-persistent-data-0.9.0-13.el9.x86_64", "Installed: nss-softokn-freebl-3.79.0-14.el9_0.x86_64", "Installed: rubygem-bundler-2.2.33-160.el9_0.noarch", "Installed: pcs-0.11.4-4.el9.x86_64", "Installed: nss-sysinit-3.79.0-14.el9_0.x86_64", "Installed: nss-tools-3.79.0-14.el9_0.x86_64", "Installed: pkgconf-1.7.3-10.el9.x86_64", "Installed: pkgconf-m4-1.7.3-10.el9.noarch", "Installed: pkgconf-pkg-config-1.7.3-10.el9.x86_64", "Installed: nss-util-3.79.0-14.el9_0.x86_64", "Installed: perl-TimeDate-1:2.33-6.el9.noarch", "Installed: rubygem-io-console-0.5.7-160.el9_0.x86_64", "Installed: rubygem-json-2.5.1-160.el9_0.x86_64", "Installed: libaio-0.3.111-13.el9.x86_64", "Installed: rubygem-psych-3.3.2-160.el9_0.x86_64", "Installed: resource-agents-4.10.0-34.el9.x86_64", "Installed: python3-cffi-1.14.5-5.el9.x86_64", "Installed: lvm2-9:2.03.17-5.el9.x86_64", "Installed: rubygem-rdoc-6.3.3-160.el9_0.noarch", "Installed: lvm2-libs-9:2.03.17-5.el9.x86_64", "Installed: libpkgconf-1.7.3-10.el9.x86_64", "Installed: python3-cryptography-36.0.1-2.el9.x86_64", "Installed: rubygem-rexml-3.2.5-160.el9_0.noarch", "Installed: rubygems-3.2.33-160.el9_0.noarch", "Installed: redhat-logos-90.4-1.el9.x86_64", "Installed: corosync-3.1.7-1.el9.x86_64", "Installed: python3-pycurl-7.43.0.6-8.el9.x86_64", "Installed: corosync-qnetd-3.0.2-1.el9.x86_64", "Installed: net-snmp-libs-1:5.9.1-7.el9_0.1.x86_64", "Installed: bzip2-1.0.8-8.el9.x86_64", "Installed: net-tools-2.0-0.62.20160912git.el9.x86_64", "Installed: python3-ply-3.11-14.el9.noarch", "Installed: python3-pycparser-2.20-6.el9.noarch" ] } TASK [fedora.linux_system_roles.ha_cluster : Set hacluster password] *********** task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:21 Saturday 11 February 2023 20:44:30 +0000 (0:00:20.646) 0:01:59.929 ***** changed: [sut] => { "append": false, "changed": true, "comment": "cluster user", "group": 189, "home": "/home/hacluster", "move_home": false, "name": "hacluster", "password": "NOT_LOGGING_PASSWORD", "shell": "/sbin/nologin", "state": "present", "uid": 189 } TASK [fedora.linux_system_roles.ha_cluster : Configure pcs / pcsd] ************* task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:28 Saturday 11 February 2023 20:44:53 +0000 (0:00:23.762) 0:02:23.692 ***** included: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/pcs-configure-pcs-pcsd.yml for sut TASK [fedora.linux_system_roles.ha_cluster : Stop pcsd] ************************ task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/pcs-configure-pcs-pcsd.yml:6 Saturday 11 February 2023 20:44:58 +0000 (0:00:04.791) 0:02:28.483 ***** ok: [sut] => { "changed": false, "name": "pcsd", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:cluster_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "sysinit.target network-online.target systemd-journald.socket basic.target system.slice pcsd-ruby.service", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", 
"BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ConsistsOf": "pcsd-ruby.service", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "PCS GUI and remote configuration interface", "DevicePolicy": "auto", "Documentation": "\"man:pcsd(8)\" \"man:pcs(8)\"", "DynamicUser": "no", "EnvironmentFiles": "/etc/sysconfig/pcsd (ignore_errors=no)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/sbin/pcsd ; argv[]=/usr/sbin/pcsd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/pcsd ; argv[]=/usr/sbin/pcsd ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/pcsd.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "pcsd.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", 
"LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13988", "LimitNPROCSoft": "13988", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13988", "LimitSIGPENDINGSoft": "13988", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "pcsd.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "main", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "sysinit.target network-online.target system.slice pcsd-ruby.service", "Restart": "no", "RestartKillSignal": "15", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22380", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "disabled", "UtmpMode": "init", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } 
} TASK [fedora.linux_system_roles.ha_cluster : Regenerate pcsd TLS certificate and key] *** task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/pcs-configure-pcs-pcsd.yml:11 Saturday 11 February 2023 20:45:11 +0000 (0:00:12.417) 0:02:40.944 ***** skipping: [sut] => (item=/var/lib/pcsd/pcsd.key) => { "ansible_loop_var": "item", "changed": false, "item": "/var/lib/pcsd/pcsd.key", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=/var/lib/pcsd/pcsd.crt) => { "ansible_loop_var": "item", "changed": false, "item": "/var/lib/pcsd/pcsd.crt", "skip_reason": "Conditional result was False" } skipping: [sut] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.ha_cluster : Get the stat of /var/lib/pcsd] **** task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/pcs-configure-pcs-pcsd.yml:25 Saturday 11 February 2023 20:45:11 +0000 (0:00:00.240) 0:02:41.185 ***** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Allow certmonger to write into pcsd's certificate directory] *** task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/pcs-configure-pcs-pcsd.yml:30 Saturday 11 February 2023 20:45:11 +0000 (0:00:00.231) 0:02:41.416 ***** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Ensure the name of ha_cluster_pcsd_certificates is /var/lib/pcsd/pcsd; Create certificates using the certificate role] *** task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/pcs-configure-pcs-pcsd.yml:37 Saturday 11 February 2023 20:45:11 +0000 (0:00:00.196) 0:02:41.612 ***** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Set pcsd's certificate directory back to cluster_var_lib_t] *** task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/pcs-configure-pcs-pcsd.yml:49 Saturday 11 February 2023 20:45:11 +0000 (0:00:00.274) 0:02:41.887 ***** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Distribute pcsd TLS private key] *** task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/pcs-configure-pcs-pcsd.yml:64 Saturday 11 February 2023 20:45:12 +0000 (0:00:00.283) 0:02:42.170 ***** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Distribute pcsd TLS certificate] *** task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/pcs-configure-pcs-pcsd.yml:71 Saturday 11 February 2023 20:45:12 +0000 (0:00:00.197) 0:02:42.367 ***** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Distribute pcs_settings.conf] ***** task path: 
/WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/pcs-configure-pcs-pcsd.yml:79 Saturday 11 February 2023 20:45:12 +0000 (0:00:00.303) 0:02:42.670 ***** changed: [sut] => { "changed": true, "checksum": "b504e1b9c9aa23803dd6f95e66c757088b08551d", "dest": "/var/lib/pcsd/pcs_settings.conf", "gid": 0, "group": "root", "md5sum": "087ff556d850518c8fff5ad1179d8817", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:cluster_var_lib_t:s0", "size": 359, "src": "/root/.ansible/tmp/ansible-tmp-1676148313.0323503-25965-251128603579606/source", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.ha_cluster : Start pcsd with updated config files and configure it to start on boot] *** task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/pcs-configure-pcs-pcsd.yml:88 Saturday 11 February 2023 20:45:15 +0000 (0:00:02.603) 0:02:45.274 ***** changed: [sut] => { "changed": true, "enabled": true, "name": "pcsd", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:cluster_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "systemd-journald.socket sysinit.target pcsd-ruby.service system.slice network-online.target basic.target", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ConsistsOf": "pcsd-ruby.service", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "PCS GUI and remote configuration interface", "DevicePolicy": "auto", "Documentation": "\"man:pcsd(8)\" \"man:pcs(8)\"", "DynamicUser": "no", "EnvironmentFiles": "/etc/sysconfig/pcsd (ignore_errors=no)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/sbin/pcsd ; argv[]=/usr/sbin/pcsd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/pcsd ; 
argv[]=/usr/sbin/pcsd ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/pcsd.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "pcsd.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13988", "LimitNPROCSoft": "13988", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13988", "LimitSIGPENDINGSoft": "13988", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "pcsd.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "main", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "sysinit.target pcsd-ruby.service system.slice network-online.target", "Restart": "no", "RestartKillSignal": "15", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", 
"RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22380", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "disabled", "UtmpMode": "init", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.ha_cluster : Fetch pcs capabilities] *********** task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/pcs-configure-pcs-pcsd.yml:94 Saturday 11 February 2023 20:45:19 +0000 (0:00:04.630) 0:02:49.905 ***** ok: [sut] => { "changed": false, "cmd": [ "pcs", "--version", "--full" ], "delta": "0:00:00.727354", "end": "2023-02-11 20:45:21.600847", "rc": 0, "start": "2023-02-11 20:45:20.873493" } STDOUT: 0.11.4 booth booth.enable-authfile.set booth.enable-authfile.unset cluster.config.backup-local cluster.config.restore-cluster cluster.config.restore-local cluster.config.uuid cluster.create cluster.create.enable cluster.create.local cluster.create.no-keys-sync cluster.create.separated-name-and-address cluster.create.start cluster.create.start.wait cluster.create.transport.knet cluster.create.transport.udp-udpu cluster.create.transport.udp-udpu.no-rrp cluster.destroy cluster.destroy.all cluster.report cluster.verify corosync.authkey.update corosync.config.get corosync.config.get.struct corosync.config.reload corosync.config.sync-to-local-cluster corosync.config.update corosync.link.add corosync.link.remove corosync.link.remove.list corosync.link.update corosync.qdevice corosync.qdevice.model.net corosync.quorum corosync.quorum.device corosync.quorum.device.heuristics corosync.quorum.device.model.net corosync.quorum.set-expected-votes-runtime corosync.quorum.status corosync.quorum.unblock corosync.totem.block_unlisted_ips corosync.uidgid node.add node.add.enable node.add.separated-name-and-address node.add.start node.add.start.wait node.attributes node.attributes.set-list-for-node node.confirm-off node.fence node.guest node.kill node.maintenance node.maintenance.all node.maintenance.list node.maintenance.wait node.remote node.remote.onfail-demote node.remove node.remove-from-caches node.remove.list node.standby node.standby.all node.standby.list node.standby.wait 
node.start-stop-enable-disable node.start-stop-enable-disable.all node.start-stop-enable-disable.list node.start-stop-enable-disable.start-wait node.utilization node.utilization.set-list-for-node pcmk.acl.enable-disable pcmk.acl.group pcmk.acl.role pcmk.acl.role.create-with-permissions pcmk.acl.role.delete-with-users-groups pcmk.acl.user pcmk.alert pcmk.cib.checkpoints pcmk.cib.checkpoints.diff pcmk.cib.edit pcmk.cib.get pcmk.cib.get.scope pcmk.cib.roles.promoted-unpromoted pcmk.cib.set pcmk.constraint.colocation.set pcmk.constraint.colocation.set.options pcmk.constraint.colocation.simple pcmk.constraint.colocation.simple.options pcmk.constraint.hide-expired pcmk.constraint.location.simple pcmk.constraint.location.simple.options pcmk.constraint.location.simple.resource-regexp pcmk.constraint.location.simple.rule pcmk.constraint.location.simple.rule.node-attr-type-number pcmk.constraint.location.simple.rule.options pcmk.constraint.location.simple.rule.rule-add-remove pcmk.constraint.no-autocorrect pcmk.constraint.order.set pcmk.constraint.order.set.options pcmk.constraint.order.simple pcmk.constraint.order.simple.options pcmk.constraint.ticket.set pcmk.constraint.ticket.set.options pcmk.constraint.ticket.simple pcmk.constraint.ticket.simple.constraint-id pcmk.properties.cluster pcmk.properties.operation-defaults pcmk.properties.operation-defaults.multiple pcmk.properties.operation-defaults.rule pcmk.properties.operation-defaults.rule-rsc-op pcmk.properties.operation-defaults.rule.hide-expired pcmk.properties.operation-defaults.rule.node-attr-type-number pcmk.properties.resource-defaults pcmk.properties.resource-defaults.multiple pcmk.properties.resource-defaults.rule pcmk.properties.resource-defaults.rule-rsc-op pcmk.properties.resource-defaults.rule.hide-expired pcmk.properties.resource-defaults.rule.node-attr-type-number pcmk.resource.ban-move-clear pcmk.resource.ban-move-clear.clear-expired pcmk.resource.bundle pcmk.resource.bundle.container-docker pcmk.resource.bundle.container-docker.promoted-max pcmk.resource.bundle.container-podman pcmk.resource.bundle.container-podman.promoted-max pcmk.resource.bundle.container-rkt pcmk.resource.bundle.container-rkt.promoted-max pcmk.resource.bundle.reset pcmk.resource.bundle.wait pcmk.resource.cleanup pcmk.resource.cleanup.one-resource pcmk.resource.cleanup.strict pcmk.resource.clone pcmk.resource.clone.custom-id pcmk.resource.clone.meta-in-create pcmk.resource.clone.wait pcmk.resource.config.output-formats pcmk.resource.create pcmk.resource.create.clone.custom-id pcmk.resource.create.in-existing-bundle pcmk.resource.create.meta pcmk.resource.create.no-master pcmk.resource.create.operations pcmk.resource.create.operations.onfail-demote pcmk.resource.create.promotable pcmk.resource.create.promotable.custom-id pcmk.resource.create.wait pcmk.resource.debug pcmk.resource.delete pcmk.resource.disable.safe pcmk.resource.disable.safe.brief pcmk.resource.disable.safe.tag pcmk.resource.disable.simulate pcmk.resource.disable.simulate.brief pcmk.resource.disable.simulate.tag pcmk.resource.enable-disable pcmk.resource.enable-disable.list pcmk.resource.enable-disable.tag pcmk.resource.enable-disable.wait pcmk.resource.failcount pcmk.resource.group pcmk.resource.group.add-remove-list pcmk.resource.group.wait pcmk.resource.manage-unmanage pcmk.resource.manage-unmanage.list pcmk.resource.manage-unmanage.tag pcmk.resource.manage-unmanage.with-monitor pcmk.resource.move.autoclean pcmk.resource.move.autoclean.default pcmk.resource.promotable 
pcmk.resource.promotable.custom-id pcmk.resource.promotable.meta-in-create pcmk.resource.promotable.wait pcmk.resource.refresh pcmk.resource.refresh.one-resource pcmk.resource.refresh.strict pcmk.resource.relations pcmk.resource.relocate pcmk.resource.restart pcmk.resource.update pcmk.resource.update-meta pcmk.resource.update-meta.list pcmk.resource.update-meta.wait pcmk.resource.update-operations pcmk.resource.update-operations.onfail-demote pcmk.resource.update.meta pcmk.resource.update.operations pcmk.resource.update.operations.onfail-demote pcmk.resource.update.wait pcmk.resource.utilization pcmk.resource.utilization-set-list-for-resource pcmk.stonith.cleanup pcmk.stonith.cleanup.one-resource pcmk.stonith.cleanup.strict pcmk.stonith.create pcmk.stonith.create.in-group pcmk.stonith.create.meta pcmk.stonith.create.operations pcmk.stonith.create.operations.onfail-demote pcmk.stonith.create.wait pcmk.stonith.delete pcmk.stonith.enable-disable pcmk.stonith.enable-disable.list pcmk.stonith.enable-disable.wait pcmk.stonith.history.cleanup pcmk.stonith.history.show pcmk.stonith.history.update pcmk.stonith.levels pcmk.stonith.levels.add-remove-devices-list pcmk.stonith.levels.clear pcmk.stonith.levels.node-attr pcmk.stonith.levels.node-regexp pcmk.stonith.levels.verify pcmk.stonith.refresh pcmk.stonith.refresh.one-resource pcmk.stonith.refresh.strict pcmk.stonith.update pcmk.stonith.update.scsi-devices pcmk.stonith.update.scsi-devices.add-remove pcmk.stonith.update.scsi-devices.mpath pcmk.tag pcmk.tag.resources pcs.auth.client pcs.auth.client.cluster pcs.auth.client.token pcs.auth.deauth-client pcs.auth.deauth-server pcs.auth.no-bidirectional pcs.auth.separated-name-and-address pcs.auth.server.token pcs.cfg-in-file.cib pcs.daemon-ssl-cert.set pcs.daemon-ssl-cert.sync-to-local-cluster pcs.disaster-recovery.essentials pcs.reports.severity.deprecation pcs.request-timeout resource-agents.describe resource-agents.list resource-agents.list.detailed resource-agents.ocf.version-1-0 resource-agents.ocf.version-1-1 resource-agents.self-validation sbd sbd.option-timeout-action sbd.shared-block-device status.corosync.membership status.pcmk.resources.hide-inactive status.pcmk.resources.id status.pcmk.resources.node status.pcmk.resources.orphaned status.pcmk.xml stonith-agents.describe stonith-agents.list stonith-agents.list.detailed stonith-agents.ocf.version-1-0 stonith-agents.ocf.version-1-1 stonith-agents.self-validation TASK [fedora.linux_system_roles.ha_cluster : Parse pcs capabilities] *********** task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/pcs-configure-pcs-pcsd.yml:101 Saturday 11 February 2023 20:45:21 +0000 (0:00:01.643) 0:02:51.548 ***** ok: [sut] => { "ansible_facts": { "__ha_cluster_pcs_capabilities": [ "booth", "booth.enable-authfile.set", "booth.enable-authfile.unset", "cluster.config.backup-local", "cluster.config.restore-cluster", "cluster.config.restore-local", "cluster.config.uuid", "cluster.create", "cluster.create.enable", "cluster.create.local", "cluster.create.no-keys-sync", "cluster.create.separated-name-and-address", "cluster.create.start", "cluster.create.start.wait", "cluster.create.transport.knet", "cluster.create.transport.udp-udpu", "cluster.create.transport.udp-udpu.no-rrp", "cluster.destroy", "cluster.destroy.all", "cluster.report", "cluster.verify", "corosync.authkey.update", "corosync.config.get", "corosync.config.get.struct", "corosync.config.reload", 
"corosync.config.sync-to-local-cluster", "corosync.config.update", "corosync.link.add", "corosync.link.remove", "corosync.link.remove.list", "corosync.link.update", "corosync.qdevice", "corosync.qdevice.model.net", "corosync.quorum", "corosync.quorum.device", "corosync.quorum.device.heuristics", "corosync.quorum.device.model.net", "corosync.quorum.set-expected-votes-runtime", "corosync.quorum.status", "corosync.quorum.unblock", "corosync.totem.block_unlisted_ips", "corosync.uidgid", "node.add", "node.add.enable", "node.add.separated-name-and-address", "node.add.start", "node.add.start.wait", "node.attributes", "node.attributes.set-list-for-node", "node.confirm-off", "node.fence", "node.guest", "node.kill", "node.maintenance", "node.maintenance.all", "node.maintenance.list", "node.maintenance.wait", "node.remote", "node.remote.onfail-demote", "node.remove", "node.remove-from-caches", "node.remove.list", "node.standby", "node.standby.all", "node.standby.list", "node.standby.wait", "node.start-stop-enable-disable", "node.start-stop-enable-disable.all", "node.start-stop-enable-disable.list", "node.start-stop-enable-disable.start-wait", "node.utilization", "node.utilization.set-list-for-node", "pcmk.acl.enable-disable", "pcmk.acl.group", "pcmk.acl.role", "pcmk.acl.role.create-with-permissions", "pcmk.acl.role.delete-with-users-groups", "pcmk.acl.user", "pcmk.alert", "pcmk.cib.checkpoints", "pcmk.cib.checkpoints.diff", "pcmk.cib.edit", "pcmk.cib.get", "pcmk.cib.get.scope", "pcmk.cib.roles.promoted-unpromoted", "pcmk.cib.set", "pcmk.constraint.colocation.set", "pcmk.constraint.colocation.set.options", "pcmk.constraint.colocation.simple", "pcmk.constraint.colocation.simple.options", "pcmk.constraint.hide-expired", "pcmk.constraint.location.simple", "pcmk.constraint.location.simple.options", "pcmk.constraint.location.simple.resource-regexp", "pcmk.constraint.location.simple.rule", "pcmk.constraint.location.simple.rule.node-attr-type-number", "pcmk.constraint.location.simple.rule.options", "pcmk.constraint.location.simple.rule.rule-add-remove", "pcmk.constraint.no-autocorrect", "pcmk.constraint.order.set", "pcmk.constraint.order.set.options", "pcmk.constraint.order.simple", "pcmk.constraint.order.simple.options", "pcmk.constraint.ticket.set", "pcmk.constraint.ticket.set.options", "pcmk.constraint.ticket.simple", "pcmk.constraint.ticket.simple.constraint-id", "pcmk.properties.cluster", "pcmk.properties.operation-defaults", "pcmk.properties.operation-defaults.multiple", "pcmk.properties.operation-defaults.rule", "pcmk.properties.operation-defaults.rule-rsc-op", "pcmk.properties.operation-defaults.rule.hide-expired", "pcmk.properties.operation-defaults.rule.node-attr-type-number", "pcmk.properties.resource-defaults", "pcmk.properties.resource-defaults.multiple", "pcmk.properties.resource-defaults.rule", "pcmk.properties.resource-defaults.rule-rsc-op", "pcmk.properties.resource-defaults.rule.hide-expired", "pcmk.properties.resource-defaults.rule.node-attr-type-number", "pcmk.resource.ban-move-clear", "pcmk.resource.ban-move-clear.clear-expired", "pcmk.resource.bundle", "pcmk.resource.bundle.container-docker", "pcmk.resource.bundle.container-docker.promoted-max", "pcmk.resource.bundle.container-podman", "pcmk.resource.bundle.container-podman.promoted-max", "pcmk.resource.bundle.container-rkt", "pcmk.resource.bundle.container-rkt.promoted-max", "pcmk.resource.bundle.reset", "pcmk.resource.bundle.wait", "pcmk.resource.cleanup", "pcmk.resource.cleanup.one-resource", "pcmk.resource.cleanup.strict", 
"pcmk.resource.clone", "pcmk.resource.clone.custom-id", "pcmk.resource.clone.meta-in-create", "pcmk.resource.clone.wait", "pcmk.resource.config.output-formats", "pcmk.resource.create", "pcmk.resource.create.clone.custom-id", "pcmk.resource.create.in-existing-bundle", "pcmk.resource.create.meta", "pcmk.resource.create.no-master", "pcmk.resource.create.operations", "pcmk.resource.create.operations.onfail-demote", "pcmk.resource.create.promotable", "pcmk.resource.create.promotable.custom-id", "pcmk.resource.create.wait", "pcmk.resource.debug", "pcmk.resource.delete", "pcmk.resource.disable.safe", "pcmk.resource.disable.safe.brief", "pcmk.resource.disable.safe.tag", "pcmk.resource.disable.simulate", "pcmk.resource.disable.simulate.brief", "pcmk.resource.disable.simulate.tag", "pcmk.resource.enable-disable", "pcmk.resource.enable-disable.list", "pcmk.resource.enable-disable.tag", "pcmk.resource.enable-disable.wait", "pcmk.resource.failcount", "pcmk.resource.group", "pcmk.resource.group.add-remove-list", "pcmk.resource.group.wait", "pcmk.resource.manage-unmanage", "pcmk.resource.manage-unmanage.list", "pcmk.resource.manage-unmanage.tag", "pcmk.resource.manage-unmanage.with-monitor", "pcmk.resource.move.autoclean", "pcmk.resource.move.autoclean.default", "pcmk.resource.promotable", "pcmk.resource.promotable.custom-id", "pcmk.resource.promotable.meta-in-create", "pcmk.resource.promotable.wait", "pcmk.resource.refresh", "pcmk.resource.refresh.one-resource", "pcmk.resource.refresh.strict", "pcmk.resource.relations", "pcmk.resource.relocate", "pcmk.resource.restart", "pcmk.resource.update", "pcmk.resource.update-meta", "pcmk.resource.update-meta.list", "pcmk.resource.update-meta.wait", "pcmk.resource.update-operations", "pcmk.resource.update-operations.onfail-demote", "pcmk.resource.update.meta", "pcmk.resource.update.operations", "pcmk.resource.update.operations.onfail-demote", "pcmk.resource.update.wait", "pcmk.resource.utilization", "pcmk.resource.utilization-set-list-for-resource", "pcmk.stonith.cleanup", "pcmk.stonith.cleanup.one-resource", "pcmk.stonith.cleanup.strict", "pcmk.stonith.create", "pcmk.stonith.create.in-group", "pcmk.stonith.create.meta", "pcmk.stonith.create.operations", "pcmk.stonith.create.operations.onfail-demote", "pcmk.stonith.create.wait", "pcmk.stonith.delete", "pcmk.stonith.enable-disable", "pcmk.stonith.enable-disable.list", "pcmk.stonith.enable-disable.wait", "pcmk.stonith.history.cleanup", "pcmk.stonith.history.show", "pcmk.stonith.history.update", "pcmk.stonith.levels", "pcmk.stonith.levels.add-remove-devices-list", "pcmk.stonith.levels.clear", "pcmk.stonith.levels.node-attr", "pcmk.stonith.levels.node-regexp", "pcmk.stonith.levels.verify", "pcmk.stonith.refresh", "pcmk.stonith.refresh.one-resource", "pcmk.stonith.refresh.strict", "pcmk.stonith.update", "pcmk.stonith.update.scsi-devices", "pcmk.stonith.update.scsi-devices.add-remove", "pcmk.stonith.update.scsi-devices.mpath", "pcmk.tag", "pcmk.tag.resources", "pcs.auth.client", "pcs.auth.client.cluster", "pcs.auth.client.token", "pcs.auth.deauth-client", "pcs.auth.deauth-server", "pcs.auth.no-bidirectional", "pcs.auth.separated-name-and-address", "pcs.auth.server.token", "pcs.cfg-in-file.cib", "pcs.daemon-ssl-cert.set", "pcs.daemon-ssl-cert.sync-to-local-cluster", "pcs.disaster-recovery.essentials", "pcs.reports.severity.deprecation", "pcs.request-timeout", "resource-agents.describe", "resource-agents.list", "resource-agents.list.detailed", "resource-agents.ocf.version-1-0", "resource-agents.ocf.version-1-1", 
"resource-agents.self-validation", "sbd", "sbd.option-timeout-action", "sbd.shared-block-device", "status.corosync.membership", "status.pcmk.resources.hide-inactive", "status.pcmk.resources.id", "status.pcmk.resources.node", "status.pcmk.resources.orphaned", "status.pcmk.xml", "stonith-agents.describe", "stonith-agents.list", "stonith-agents.list.detailed", "stonith-agents.ocf.version-1-0", "stonith-agents.ocf.version-1-1", "stonith-agents.self-validation" ] }, "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Install cluster packages] ********* task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:34 Saturday 11 February 2023 20:45:21 +0000 (0:00:00.114) 0:02:51.662 ***** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Configure firewall] *************** task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:39 Saturday 11 February 2023 20:45:21 +0000 (0:00:00.184) 0:02:51.847 ***** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Configure selinux] **************** task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:42 Saturday 11 February 2023 20:45:22 +0000 (0:00:00.162) 0:02:52.010 ***** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Distribute fence-virt authkey] **** task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:45 Saturday 11 February 2023 20:45:22 +0000 (0:00:00.088) 0:02:52.098 ***** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Configure SBD] ******************** task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:50 Saturday 11 February 2023 20:45:22 +0000 (0:00:00.199) 0:02:52.298 ***** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Configure corosync] *************** task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:53 Saturday 11 February 2023 20:45:22 +0000 (0:00:00.094) 0:02:52.392 ***** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Pcs auth] ************************* task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:56 Saturday 11 February 2023 20:45:22 +0000 (0:00:00.154) 0:02:52.547 ***** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Distribute cluster shared keys] *** task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:61 Saturday 11 February 2023 20:45:22 +0000 (0:00:00.224) 0:02:52.771 ***** skipping: 
[sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Enable or disable cluster services on boot] *** task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:67 Saturday 11 February 2023 20:45:22 +0000 (0:00:00.091) 0:02:52.863 ***** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Start the cluster and reload corosync.conf] *** task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:70 Saturday 11 February 2023 20:45:23 +0000 (0:00:00.156) 0:02:53.019 ***** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Create and push CIB] ************** task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:73 Saturday 11 February 2023 20:45:23 +0000 (0:00:00.122) 0:02:53.142 ***** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Remove cluster configuration] ***** task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:79 Saturday 11 February 2023 20:45:23 +0000 (0:00:00.183) 0:02:53.325 ***** included: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/cluster-destroy-pcs-0.10.yml for sut TASK [fedora.linux_system_roles.ha_cluster : Remove cluster configuration] ***** task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/cluster-destroy-pcs-0.10.yml:9 Saturday 11 February 2023 20:45:23 +0000 (0:00:00.143) 0:02:53.468 ***** ok: [sut] => (item=/etc/corosync/corosync.conf) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "pcs", "cluster", "destroy" ], "delta": null, "end": null, "item": "/etc/corosync/corosync.conf", "rc": 0, "start": null } STDOUT: skipped, since /etc/corosync/corosync.conf does not exist MSG: Did not run command since '/etc/corosync/corosync.conf' does not exist ok: [sut] => (item=/var/lib/pacemaker/cib/cib.xml) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "pcs", "cluster", "destroy" ], "delta": null, "end": null, "item": "/var/lib/pacemaker/cib/cib.xml", "rc": 0, "start": null } STDOUT: skipped, since /var/lib/pacemaker/cib/cib.xml does not exist MSG: Did not run command since '/var/lib/pacemaker/cib/cib.xml' does not exist TASK [fedora.linux_system_roles.ha_cluster : Remove fence-virt authkey] ******** task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:82 Saturday 11 February 2023 20:45:24 +0000 (0:00:00.646) 0:02:54.114 ***** ok: [sut] => { "changed": false, "path": "/etc/cluster/fence_xvm.key", "state": "absent" } TASK [fedora.linux_system_roles.ha_cluster : Configure qnetd] ****************** task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:87 Saturday 11 February 2023 20:45:24 +0000 (0:00:00.310) 0:02:54.425 ***** 
included: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/pcs-qnetd.yml for sut TASK [fedora.linux_system_roles.ha_cluster : Remove qnetd configuration] ******* task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/pcs-qnetd.yml:3 Saturday 11 February 2023 20:45:24 +0000 (0:00:00.120) 0:02:54.546 ***** skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Setup qnetd] ********************** task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/pcs-qnetd.yml:15 Saturday 11 February 2023 20:45:24 +0000 (0:00:00.111) 0:02:54.658 ***** changed: [sut] => { "changed": true, "cmd": [ "pcs", "--start", "--", "qdevice", "setup", "model", "net" ], "delta": "0:00:01.475322", "end": "2023-02-11 20:45:26.579418", "failed_when_result": false, "rc": 0, "start": "2023-02-11 20:45:25.104096" } STDERR: Quorum device 'net' initialized Starting quorum device... quorum device started TASK [fedora.linux_system_roles.ha_cluster : Enable or disable qnetd service on boot] *** task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/pcs-qnetd.yml:25 Saturday 11 February 2023 20:45:26 +0000 (0:00:01.875) 0:02:56.533 ***** ok: [sut] => { "changed": false, "enabled": false, "name": "corosync-qnetd", "status": { "AccessSELinuxContext": "system_u:object_r:cluster_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2023-02-11 20:45:26 UTC", "ActiveEnterTimestampMonotonic": "1247100366", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "sysinit.target systemd-journald.socket system.slice -.mount basic.target network-online.target", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2023-02-11 20:45:26 UTC", "AssertTimestampMonotonic": "1247051499", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "45104000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanClean": "runtime", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2023-02-11 20:45:26 UTC", "ConditionTimestampMonotonic": "1247051377", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/system.slice/corosync-qnetd.service", "ControlGroupId": 
"5147", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Corosync Qdevice Network daemon", "DevicePolicy": "auto", "Documentation": "man:corosync-qnetd", "DynamicUser": "no", "EnvironmentFiles": "/etc/sysconfig/corosync-qnetd (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "7228", "ExecMainStartTimestamp": "Sat 2023-02-11 20:45:26 UTC", "ExecMainStartTimestampMonotonic": "1247053586", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/corosync-qnetd ; argv[]=/usr/bin/corosync-qnetd -f $COROSYNC_QNETD_OPTIONS ; ignore_errors=no ; start_time=[Sat 2023-02-11 20:45:26 UTC] ; stop_time=[n/a] ; pid=7228 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/corosync-qnetd ; argv[]=/usr/bin/corosync-qnetd -f $COROSYNC_QNETD_OPTIONS ; flags= ; start_time=[Sat 2023-02-11 20:45:26 UTC] ; stop_time=[n/a] ; pid=7228 ; code=(null) ; status=0/0 }", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/corosync-qnetd.service", "FreezerState": "running", "GID": "987", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "corosync-qnetd.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2023-02-11 20:45:26 UTC", "InactiveExitTimestampMonotonic": "1247054321", "InvocationID": "eb6e454b53d14de090b2d07613b641c5", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13988", "LimitNPROCSoft": "13988", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13988", "LimitSIGPENDINGSoft": "13988", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "7228", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "6660096", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", 
"NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "corosync-qnetd.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "main", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "sysinit.target system.slice -.mount network-online.target", "RequiresMountsFor": "/run/corosync-qnetd", "Restart": "on-abnormal", "RestartKillSignal": "15", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectory": "corosync-qnetd", "RuntimeDirectoryMode": "0770", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Sat 2023-02-11 20:45:26 UTC", "StateChangeTimestampMonotonic": "1247100366", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "1", "TasksMax": "22380", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "987", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "disabled", "User": "coroqnetd", "UtmpMode": "init", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [Get services status] ***************************************************** task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/tests/tests_qnetd_disabled.yml:35 Saturday 11 February 2023 20:45:27 +0000 (0:00:00.690) 0:02:57.224 ***** ok: [sut] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "arp-ethers.service": { "name": "arp-ethers.service", "source": "systemd", "state": "inactive", "status": 
"disabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "corosync-notifyd.service": { "name": "corosync-notifyd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "corosync-qnetd.service": { "name": "corosync-qnetd.service", "source": "systemd", "state": "running", "status": "disabled" }, "corosync.service": { "name": "corosync.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "crm_mon.service": { "name": "crm_mon.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": 
"disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "import-state.service": { "name": "import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", 
"state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "loadmodules.service": { "name": "loadmodules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pacemaker.service": { 
"name": "pacemaker.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcsd-ruby.service": { "name": "pcsd-ruby.service", "source": "systemd", "state": "running", "status": "disabled" }, "pcsd.service": { "name": "pcsd.service", "source": "systemd", "state": "running", "status": "enabled" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rhsm-facts.service": { "name": "rhsm-facts.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rhsm.service": { "name": "rhsm.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rhsmcertd.service": { "name": "rhsmcertd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "snapd.seeded.service": { "name": "snapd.seeded.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sntp.service": { "name": "sntp.service", "source": 
"systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-system-token.service": { "name": "systemd-boot-system-token.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", 
"source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": 
"inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles.service": { "name": "systemd-tmpfiles.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": 
"static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "yppasswdd.service": { "name": "yppasswdd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypserv.service": { "name": "ypserv.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypxfrd.service": { "name": "ypxfrd.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [Check services status] *************************************************** task path: /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/tests/tests_qnetd_disabled.yml:38 Saturday 11 February 2023 20:45:30 +0000 (0:00:03.213) 0:03:00.438 ***** ok: [sut] => { "changed": false } MSG: All assertions passed PLAY RECAP ********************************************************************* sut : ok=30 changed=5 unreachable=0 failed=0 skipped=26 rescued=0 ignored=0 Saturday 11 February 2023 20:45:30 +0000 (0:00:00.086) 0:03:00.524 ***** =============================================================================== fedora.linux_system_roles.ha_cluster : Set hacluster password ---------- 23.76s /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:21 fedora.linux_system_roles.ha_cluster : Install role essential packages -- 20.65s /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:14 Gathering Facts -------------------------------------------------------- 17.95s /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/tests/tests_qnetd_disabled.yml:9 fedora.linux_system_roles.ha_cluster : Stop pcsd ----------------------- 12.42s /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/pcs-configure-pcs-pcsd.yml:6 fedora.linux_system_roles.ha_cluster : Make sure qnetd config files are not present --- 9.76s /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_cleanup_qnetd.yml:15 fedora.linux_system_roles.ha_cluster : Extract qdevice settings --------- 9.52s /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/check-and-prepare-role-variables.yml:48 fedora.linux_system_roles.ha_cluster : Make sure qnetd is not installed --- 8.42s 
/WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_cleanup_qnetd.yml:9 fedora.linux_system_roles.ha_cluster : Fail if passwords are not specified --- 6.11s /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/check-and-prepare-role-variables.yml:5 Run HA Cluster role ----------------------------------------------------- 5.62s /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/tests/tests_qnetd_disabled.yml:31 fedora.linux_system_roles.ha_cluster : Figure out if ATB needs to be enabled for SBD --- 4.95s /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/check-and-prepare-role-variables.yml:57 fedora.linux_system_roles.ha_cluster : Configure pcs / pcsd ------------- 4.84s /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:28 fedora.linux_system_roles.ha_cluster : Ensure ansible_facts used by role --- 4.64s /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/set_vars.yml:2 fedora.linux_system_roles.ha_cluster : Start pcsd with updated config files and configure it to start on boot --- 4.63s /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/pcs-configure-pcs-pcsd.yml:88 fedora.linux_system_roles.ha_cluster : Set platform/version specific variables --- 3.98s /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/set_vars.yml:7 Get services status ----------------------------------------------------- 3.21s /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/tests/tests_qnetd_disabled.yml:35 fedora.linux_system_roles.ha_cluster : Check and prepare role variables --- 3.20s /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:6 fedora.linux_system_roles.ha_cluster : Fail if SBD needs ATB enabled and the user configured ATB to be disabled --- 3.12s /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/check-and-prepare-role-variables.yml:67 fedora.linux_system_roles.ha_cluster : Discover cluster node names ------ 2.94s /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/check-and-prepare-role-variables.yml:40 fedora.linux_system_roles.ha_cluster : Distribute pcs_settings.conf ----- 2.60s /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/pcs-configure-pcs-pcsd.yml:79 fedora.linux_system_roles.ha_cluster : Collect cluster node names ------- 2.55s /WORKDIR/dist-git-ha_cluster-weekly-ci-ar7vq6vu/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/check-and-prepare-role-variables.yml:44 ---^---^---^---^---^--- # STDERR: ---v---v---v---v---v--- [DEPRECATION WARNING]: [defaults]callback_whitelist option, normalizing names to new standard, use callbacks_enabled instead. This feature will be removed from ansible-core in version 2.15. Deprecation warnings can be disabled by setting deprecation_warnings=False in ansible.cfg. 
[DEPRECATION WARNING]: Encryption using the Python crypt module is deprecated. The Python crypt module is deprecated and will be removed from Python 3.13. Install the passlib library for continued encryption functionality. This feature will be removed in version 2.17. Deprecation warnings can be disabled by setting deprecation_warnings=False in ansible.cfg. ---^---^---^---^---^---
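For reference, the flow exercised above (run the ha_cluster role with qnetd configured not to start on boot, then confirm corosync-qnetd is running but disabled) can be condensed into a short playbook. This is a minimal sketch, not the tests_qnetd_disabled.yml file driving this run; the `ha_cluster_cluster_present` and `ha_cluster_qnetd` variable names are assumptions based on the role's documented interface, while the verification tasks mirror the "Get services status" and "Check services status" tasks recorded in the log.

```yaml
# Minimal sketch, not the tests_qnetd_disabled.yml from this run.
# Assumptions: the fedora.linux_system_roles collection is installed, the
# ha_cluster role accepts ha_cluster_cluster_present / ha_cluster_qnetd as
# documented, and ha_cluster_hacluster_password is supplied elsewhere
# (the CI run loads it from a vault-encrypted vars file).
- name: Set up qnetd without starting it on boot, then verify service state
  hosts: all
  vars:
    ha_cluster_cluster_present: false   # qnetd host only, no cluster on this node
    ha_cluster_qnetd:
      present: true                     # install and configure qnetd
      start_on_boot: false              # assumed option name for boot enablement
  roles:
    - fedora.linux_system_roles.ha_cluster
  post_tasks:
    - name: Get services status        # mirrors tests_qnetd_disabled.yml:35
      ansible.builtin.service_facts:

    - name: Check services status      # mirrors the assertion at tests_qnetd_disabled.yml:38
      ansible.builtin.assert:
        that:
          - "ansible_facts.services['corosync-qnetd.service'].state == 'running'"
          - "ansible_facts.services['corosync-qnetd.service'].status == 'disabled'"
```

The STDERR section flags two controller-side deprecations rather than test failures. The crypt warning is resolved by installing passlib on the controller, as the message itself suggests. The callback_whitelist warning goes away once the key is renamed to callbacks_enabled in ansible.cfg; the sketch below shows one way to do that with community.general.ini_file, assuming that collection is available and the config lives at /etc/ansible/ansible.cfg as reported at the top of the log. The callback value is a placeholder, since the actual list is not shown in this output.

```yaml
# Hedged sketch: rename the deprecated callback_whitelist key to
# callbacks_enabled in ansible.cfg. Assumes community.general is installed
# and the config path matches the one reported by ansible-playbook.
- name: Migrate deprecated callback option in ansible.cfg
  hosts: localhost
  gather_facts: false
  tasks:
    - name: Set callbacks_enabled in the [defaults] section
      community.general.ini_file:
        path: /etc/ansible/ansible.cfg
        section: defaults
        option: callbacks_enabled
        value: profile_tasks            # placeholder; carry over the old callback_whitelist value

    - name: Remove the deprecated callback_whitelist key
      community.general.ini_file:
        path: /etc/ansible/ansible.cfg
        section: defaults
        option: callback_whitelist
        state: absent
```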