From 170c003bc768e650a5abe62ec04caa9f9518d11e Mon Sep 17 00:00:00 2001 From: Clark Boylan Date: Wed, 12 Feb 2025 09:34:05 -0800 Subject: [PATCH] Install apparmor when installing podman The old install-docker upstream.yaml tasks installed apparmor for docker (it was origianlly a dependency but then docker removed it as an explicit dependency while still explicitly depending on it so we manually installed it). When we started deploying Noble nodes with podman via the install-docker role we didn't get apparmor because podman doesn't appear to depend on it. However when we got to production the production images already come with apparmor which includes profiles for things like podman and rsyslog which have caused problems for us deploying services with podman. Attempt to catch these issues in CI by explicitly installing apparmor. This should be a noop for production beceaus apparmor is already installed. This should help us catch problems with podman in CI before we ever get to production. To ensure that apparmor is working properly we capture apparmor_status output as part of our system-config-run job log collection. Note we remove the zuul lb test for haproxy.log being present as current apparmor problems with the rsyslogd profile prevent that from occuring on noble. The next change will correct that issue and reinstate the test case. Change-Id: Iea5966dbb2dcfbe1e51d9c00bad67a9d37e1b7e1 --- playbooks/roles/install-docker/tasks/Ubuntu.noble.yaml | 4 ++++ playbooks/zuul/run-base-post.yaml | 5 +++++ testinfra/test_zuul_lb.py | 7 ------- zuul.d/system-config-run.yaml | 1 + 4 files changed, 10 insertions(+), 7 deletions(-) diff --git a/playbooks/roles/install-docker/tasks/Ubuntu.noble.yaml b/playbooks/roles/install-docker/tasks/Ubuntu.noble.yaml index 7716960339..ffc22b1b4b 100644 --- a/playbooks/roles/install-docker/tasks/Ubuntu.noble.yaml +++ b/playbooks/roles/install-docker/tasks/Ubuntu.noble.yaml @@ -18,6 +18,10 @@ # TODO do we need these extra tools? - buildah - skopeo + # Production nodes have apparmor but CI nodes don't. List it + # explicitly here to resolve the delta. The old docker upstream + # install path also installed apparmor. + - apparmor state: present - name: Disable docker daemon service diff --git a/playbooks/zuul/run-base-post.yaml b/playbooks/zuul/run-base-post.yaml index da23b7ab92..1248ae7e41 100644 --- a/playbooks/zuul/run-base-post.yaml +++ b/playbooks/zuul/run-base-post.yaml @@ -25,6 +25,11 @@ - docker - podman + - name: Get AppArmor Status + shell: 'apparmor_status | tee /var/log/apparmor_status' + become: yes + failed_when: false + - include_role: name: stage-output diff --git a/testinfra/test_zuul_lb.py b/testinfra/test_zuul_lb.py index 58b25d3044..2543aee2d4 100644 --- a/testinfra/test_zuul_lb.py +++ b/testinfra/test_zuul_lb.py @@ -32,10 +32,3 @@ def test_haproxy_statsd_running(host): out = json.loads(cmd.stdout) assert out[0]["State"]["Status"] == "running" assert out[0]["RestartCount"] == 0 - -def test_haproxy_logging(host): - # rsyslog is configured to add a unix socket at this path - assert host.file('/var/lib/haproxy/dev/log').is_socket - # Haproxy logs to syslog via the above socket which produces - # this logfile - assert host.file('/var/log/haproxy.log').is_file diff --git a/zuul.d/system-config-run.yaml b/zuul.d/system-config-run.yaml index 3edfe4b124..2c99d5edce 100644 --- a/zuul.d/system-config-run.yaml +++ b/zuul.d/system-config-run.yaml @@ -13,6 +13,7 @@ zuul_copy_output: "{{ copy_output | combine(host_copy_output | default({})) }}" stage_dir: "{{ ansible_user_dir }}/zuul-output" copy_output: + '/var/log/apparmor_status': logs_txt '/var/log/syslog': logs_txt '/var/log/messages': logs_txt '/var/log/exim4': logs