How to set up a testing environment for the AppArmor operator with MicroK8s in a fresh VirtualBox Ubuntu machine

# Totally optional plugins
sudo snap install starship

cat >> ~/.bashrc <<'EOF'
eval "$(starship init bash)"
source <(helm completion bash)
source <(kubectl completion bash)
EOF

mkdir -p ~/.config && touch ~/.config/starship.toml

cat >~/.config/starship.toml <<'EOF'
[container]
format = '[$symbol \[$name\]]($style) '

[git_branch]
symbol = '🌱 '
truncation_length = 4
truncation_symbol = ''
ignore_branches = ['master', 'main']

[kubernetes]
format = 'on [⛵ ($user on )($cluster in )$context \($namespace\)](dimmed green) '
disabled = false

[time]
disabled = false
format = '[\[ $time \]]($style) '
time_format = '%T'

EOF
# ---------------
# Remove special characters from your machine name
sudo hostnamectl hostname virtualbox
# restart shell
exit

# enable cgroups
# https://github.com/opencontainers/runc/blob/main/docs/cgroup-v2.md
sudo apt-get -y install cgroup-tools
stat -c %T -f /sys/fs/cgroup
#> cgroup2fs

cat /proc/cgroups
#> subsys_name	hierarchy	num_cgroups	enabled
#> cpuset	    0	        300	        1
#> cpu	        0	        300	        1
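
# Quick sanity check (a sketch, assuming the unified cgroup v2 hierarchy is mounted
# at /sys/fs/cgroup): list the controllers available on the root cgroup.
cat /sys/fs/cgroup/cgroup.controllers
#> e.g. cpuset cpu io memory hugetlb pids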
sudo shutdown -r now

# -----
# Install Microk8s
# https://projectcalico.docs.tigera.io/getting-started/kubernetes/microk8s
sudo snap install microk8s --classic --channel=latest/stable
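
# Commonly recommended post-install step (optional): add your user to the
# 'microk8s' group so the microk8s commands below work without sudo.
# Log out and back in (or run 'newgrp microk8s') for the group change to apply.
sudo usermod -a -G microk8s "$USER"
mkdir -p ~/.kube
sudo chown -f -R "$USER" ~/.kube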
microk8s enable helm
microk8s enable dns

microk8s inspect
sudo aa-status | grep microk8s
microk8s kubectl get nodes -o=jsonpath='{range .items[*]}{@.metadata.name}: {.status.conditions[?(@.reason=="KubeletReady")].message}{"\n"}{end}'

# Check if you have configured networking correctly
# https://projectcalico.docs.tigera.io/getting-started/kubernetes/hardway/test-networking
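
# A minimal in-cluster connectivity sketch (pod name 'net-test' is arbitrary):
# start a throwaway pod, resolve the cluster DNS name of the API service, then clean up.
microk8s kubectl run net-test --image=busybox --restart=Never -- sleep 3600
microk8s kubectl wait --for=condition=ready pod/net-test
microk8s kubectl exec net-test -- nslookup kubernetes.default.svc.cluster.local
microk8s kubectl delete pod net-test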

# -----
# Install cert-manager as a prerequisite
kubectl apply -f https://github.com/cert-manager/cert-manager/releases/download/v1.10.1/cert-manager.yaml
kubectl --namespace cert-manager wait --for condition=ready pod -l app.kubernetes.io/instance=cert-manager

mkdir -p ~/.kube
sudo microk8s config > ~/.kube/config
chmod 600 ~/.kube/config
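
# Verify that plain kubectl now talks to the MicroK8s cluster
kubectl get nodes -o wide
kubectl get pods -A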

# -----
# Install and apply security-profiles-operator Helm chart
# https://github.com/kubernetes-sigs/security-profiles-operator/blob/v0.6.0/installation-usage.md#installation-using-helm
# Test installation first
helm install \
    --dry-run \
    --atomic \
    security-profiles-operator https://github.com/kubernetes-sigs/security-profiles-operator/releases/download/v0.6.0/security-profiles-operator-0.6.0.tgz | less

# Install it.
# The namespace argument appears to be ignored; the release is always installed into 'security-profiles-operator'.
helm install \
    --atomic \
    security-profiles-operator https://github.com/kubernetes-sigs/security-profiles-operator/releases/download/v0.6.0/security-profiles-operator-0.6.0.tgz

helm list -A
helm status security-profiles-operator 
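
# The operator also rolls out a per-node DaemonSet (named 'spod' in this setup);
# its pods must become Ready before profiles can be reconciled.
kubectl -n security-profiles-operator get daemonsets,pods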

# -----
# https://github.com/kubernetes-sigs/security-profiles-operator/blob/v0.6.0/installation-usage.md#create-an-apparmor-profile
mkdir -p profiles
cat >profiles/system-call-restriction.yml <<'EOF'
apiVersion: security-profiles-operator.x-k8s.io/v1alpha1
kind: AppArmorProfile
metadata:
  name: system-call-restriction
  annotations:
    description: Block writing to any file on the disk.
spec:
  policy: |
    profile system-call-restriction flags=(attach_disconnected) {
        file,
        deny /** rw,
        deny /etc/** w,
        /** /app/** /etc/default/** /etc/fonts/** rw,
    }
EOF

kubectl apply -f profiles/system-call-restriction.yml
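
# Check that the operator picked up the profile
# (assuming the CRD registers the plural resource name 'apparmorprofiles'):
kubectl get apparmorprofiles
kubectl describe apparmorprofile system-call-restriction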

kubectl create deployment testingpod --image=busybox --replicas=1  -- sleep infinity
kubectl annotate deployment testingpod container.apparmor.security.beta.kubernetes.io/busybox=system-call-restriction
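
# Note: 'kubectl annotate deployment' only annotates the Deployment object itself,
# not its pod template, and the kubelet expects the annotation value in the form
# 'localhost/<profile>'. A hedged sketch of setting it on the pod template instead
# (profile name assumed to match the one declared in the policy above):
kubectl patch deployment testingpod --type merge -p \
  '{"spec":{"template":{"metadata":{"annotations":{"container.apparmor.security.beta.kubernetes.io/busybox":"localhost/system-call-restriction"}}}}}'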

kubectl logs -n security-profiles-operator deployments/security-profiles-operator |less
#> see output below


❯ kubectl get deploy -A  --sort-by .metadata.namespace -o custom-columns=NAMESPACE:.metadata.namespace,NAME:.metadata.name,IMAGES:.spec.template.spec.containers[*].image,CONTAINERS:.spec.template.spec.containers[*].name,READY:.status.readyReplicas,TOTAL:.status.replicas
NAMESPACE                    NAME                                 IMAGES                                                             CONTAINERS                   READY   TOTAL
cert-manager                 cert-manager-cainjector              quay.io/jetstack/cert-manager-cainjector:v1.10.1                   cert-manager-cainjector      1       1
cert-manager                 cert-manager                         quay.io/jetstack/cert-manager-controller:v1.10.1                   cert-manager-controller      1       1
cert-manager                 cert-manager-webhook                 quay.io/jetstack/cert-manager-webhook:v1.10.1                      cert-manager-webhook         1       1
default                      testingpod                           busybox                                                            busybox                      1       1
kube-system                  dashboard-metrics-scraper            kubernetesui/metrics-scraper:v1.0.8                                dashboard-metrics-scraper    1       1
kube-system                  kubernetes-dashboard                 kubernetesui/dashboard:v2.7.0                                      kubernetes-dashboard         1       1
kube-system                  calico-kube-controllers              docker.io/calico/kube-controllers:v3.23.3                          calico-kube-controllers      1       1
kube-system                  coredns                              coredns/coredns:1.9.3                                              coredns                      1       1
kube-system                  metrics-server                       k8s.gcr.io/metrics-server/metrics-server:v0.5.2                    metrics-server               1       1
security-profiles-operator   security-profiles-operator           gcr.io/k8s-staging-sp-operator/security-profiles-operator:latest   security-profiles-operator   3       3
security-profiles-operator   security-profiles-operator-webhook   gcr.io/k8s-staging-sp-operator/security-profiles-operator:latest   security-profiles-operator   3       3

Further steps:

Logs:

kubectl logs -n security-profiles-operator -l=app=security-profiles-operator

Defaulted container "security-profiles-operator" out of: security-profiles-operator, metrics, non-root-enabler (init)
I0102 16:07:45.300026       1 ca.go:62] spod-config "msg"="Using cert-manager as certificate provider" 
I0102 16:08:13.032137       1 ca.go:62] spod-config "msg"="Using cert-manager as certificate provider" 
I0102 16:08:40.745962       1 ca.go:62] spod-config "msg"="Using cert-manager as certificate provider" 
I0102 16:09:08.477762       1 ca.go:62] spod-config "msg"="Using cert-manager as certificate provider" 
I0102 16:09:36.195514       1 ca.go:62] spod-config "msg"="Using cert-manager as certificate provider" 
I0102 16:10:03.919517       1 ca.go:62] spod-config "msg"="Using cert-manager as certificate provider" 
I0102 16:10:31.641465       1 ca.go:62] spod-config "msg"="Using cert-manager as certificate provider" 
I0102 16:10:59.361206       1 ca.go:62] spod-config "msg"="Using cert-manager as certificate provider" 
I0102 16:11:27.081821       1 ca.go:62] spod-config "msg"="Using cert-manager as certificate provider" 
I0102 16:11:54.805664       1 ca.go:62] spod-config "msg"="Using cert-manager as certificate provider" 
I0102 13:53:26.439078       1 leaderelection.go:248] attempting to acquire leader lease security-profiles-operator/security-profiles-operator-lock...
E0102 14:47:29.925094       1 leaderelection.go:330] error retrieving resource lock security-profiles-operator/security-profiles-operator-lock: Get "https://10.152.183.1:443/apis/coordination.k8s.io/v1/namespaces/security-profiles-operator/leases/security-profiles-operator-lock": dial tcp 10.152.183.1:443: connect: connection refused - error from a previous attempt: read tcp 10.1.228.179:40908->10.152.183.1:443: read: connection reset by peer
E0102 14:47:33.798901       1 leaderelection.go:330] error retrieving resource lock security-profiles-operator/security-profiles-operator-lock: Get "https://10.152.183.1:443/apis/coordination.k8s.io/v1/namespaces/security-profiles-operator/leases/security-profiles-operator-lock": dial tcp 10.152.183.1:443: connect: connection refused
E0102 14:47:37.270211       1 leaderelection.go:330] error retrieving resource lock security-profiles-operator/security-profiles-operator-lock: Get "https://10.152.183.1:443/apis/coordination.k8s.io/v1/namespaces/security-profiles-operator/leases/security-profiles-operator-lock": dial tcp 10.152.183.1:443: connect: connection refused
E0102 14:47:39.346644       1 leaderelection.go:330] error retrieving resource lock security-profiles-operator/security-profiles-operator-lock: Get "https://10.152.183.1:443/apis/coordination.k8s.io/v1/namespaces/security-profiles-operator/leases/security-profiles-operator-lock": dial tcp 10.152.183.1:443: connect: connection refused
E0102 14:47:42.772138       1 leaderelection.go:330] error retrieving resource lock security-profiles-operator/security-profiles-operator-lock: Get "https://10.152.183.1:443/apis/coordination.k8s.io/v1/namespaces/security-profiles-operator/leases/security-profiles-operator-lock": dial tcp 10.152.183.1:443: connect: connection refused
E0102 14:47:46.915475       1 leaderelection.go:330] error retrieving resource lock security-profiles-operator/security-profiles-operator-lock: Get "https://10.152.183.1:443/apis/coordination.k8s.io/v1/namespaces/security-profiles-operator/leases/security-profiles-operator-lock": dial tcp 10.152.183.1:443: connect: connection refused
E0102 14:47:51.123473       1 leaderelection.go:330] error retrieving resource lock security-profiles-operator/security-profiles-operator-lock: Get "https://10.152.183.1:443/apis/coordination.k8s.io/v1/namespaces/security-profiles-operator/leases/security-profiles-operator-lock": dial tcp 10.152.183.1:443: connect: connection refused
E0102 14:47:53.499441       1 leaderelection.go:330] error retrieving resource lock security-profiles-operator/security-profiles-operator-lock: Get "https://10.152.183.1:443/apis/coordination.k8s.io/v1/namespaces/security-profiles-operator/leases/security-profiles-operator-lock": dial tcp 10.152.183.1:443: connect: connection refused
E0102 14:47:56.593451       1 leaderelection.go:330] error retrieving resource lock security-profiles-operator/security-profiles-operator-lock: Get "https://10.152.183.1:443/apis/coordination.k8s.io/v1/namespaces/security-profiles-operator/leases/security-profiles-operator-lock": dial tcp 10.152.183.1:443: connect: connection refused
I0102 13:53:41.092650       1 leaderelection.go:248] attempting to acquire leader lease security-profiles-operator/security-profiles-operator-webhook-lock...
E0102 14:47:30.589735       1 leaderelection.go:330] error retrieving resource lock security-profiles-operator/security-profiles-operator-webhook-lock: Get "https://10.152.183.1:443/apis/coordination.k8s.io/v1/namespaces/security-profiles-operator/leases/security-profiles-operator-webhook-lock": dial tcp 10.152.183.1:443: connect: connection refused
E0102 14:47:34.363210       1 leaderelection.go:330] error retrieving resource lock security-profiles-operator/security-profiles-operator-webhook-lock: Get "https://10.152.183.1:443/apis/coordination.k8s.io/v1/namespaces/security-profiles-operator/leases/security-profiles-operator-webhook-lock": dial tcp 10.152.183.1:443: connect: connection refused
E0102 14:47:38.058155       1 leaderelection.go:330] error retrieving resource lock security-profiles-operator/security-profiles-operator-webhook-lock: Get "https://10.152.183.1:443/apis/coordination.k8s.io/v1/namespaces/security-profiles-operator/leases/security-profiles-operator-webhook-lock": dial tcp 10.152.183.1:443: connect: connection refused
E0102 14:47:40.864823       1 leaderelection.go:330] error retrieving resource lock security-profiles-operator/security-profiles-operator-webhook-lock: Get "https://10.152.183.1:443/apis/coordination.k8s.io/v1/namespaces/security-profiles-operator/leases/security-profiles-operator-webhook-lock": dial tcp 10.152.183.1:443: connect: connection refused
E0102 14:47:43.926254       1 leaderelection.go:330] error retrieving resource lock security-profiles-operator/security-profiles-operator-webhook-lock: Get "https://10.152.183.1:443/apis/coordination.k8s.io/v1/namespaces/security-profiles-operator/leases/security-profiles-operator-webhook-lock": dial tcp 10.152.183.1:443: connect: connection refused
E0102 14:47:48.304521       1 leaderelection.go:330] error retrieving resource lock security-profiles-operator/security-profiles-operator-webhook-lock: Get "https://10.152.183.1:443/apis/coordination.k8s.io/v1/namespaces/security-profiles-operator/leases/security-profiles-operator-webhook-lock": dial tcp 10.152.183.1:443: connect: connection refused
E0102 14:47:50.341829       1 leaderelection.go:330] error retrieving resource lock security-profiles-operator/security-profiles-operator-webhook-lock: Get "https://10.152.183.1:443/apis/coordination.k8s.io/v1/namespaces/security-profiles-operator/leases/security-profiles-operator-webhook-lock": dial tcp 10.152.183.1:443: connect: connection refused
E0102 14:47:54.484015       1 leaderelection.go:330] error retrieving resource lock security-profiles-operator/security-profiles-operator-webhook-lock: Get "https://10.152.183.1:443/apis/coordination.k8s.io/v1/namespaces/security-profiles-operator/leases/security-profiles-operator-webhook-lock": dial tcp 10.152.183.1:443: connect: connection refused
I0102 14:48:17.935921       1 leaderelection.go:258] successfully acquired lease security-profiles-operator/security-profiles-operator-webhook-lock
E0102 14:47:29.897686       1 leaderelection.go:330] error retrieving resource lock security-profiles-operator/security-profiles-operator-webhook-lock: Get "https://10.152.183.1:443/apis/coordination.k8s.io/v1/namespaces/security-profiles-operator/leases/security-profiles-operator-webhook-lock": dial tcp 10.152.183.1:443: connect: connection refused - error from a previous attempt: read tcp 10.1.228.186:55284->10.152.183.1:443: read: connection reset by peer
E0102 14:47:32.394162       1 leaderelection.go:330] error retrieving resource lock security-profiles-operator/security-profiles-operator-webhook-lock: Get "https://10.152.183.1:443/apis/coordination.k8s.io/v1/namespaces/security-profiles-operator/leases/security-profiles-operator-webhook-lock": dial tcp 10.152.183.1:443: connect: connection refused
E0102 14:47:34.978651       1 leaderelection.go:330] error retrieving resource lock security-profiles-operator/security-profiles-operator-webhook-lock: Get "https://10.152.183.1:443/apis/coordination.k8s.io/v1/namespaces/security-profiles-operator/leases/security-profiles-operator-webhook-lock": dial tcp 10.152.183.1:443: connect: connection refused
E0102 14:47:38.646782       1 leaderelection.go:330] error retrieving resource lock security-profiles-operator/security-profiles-operator-webhook-lock: Get "https://10.152.183.1:443/apis/coordination.k8s.io/v1/namespaces/security-profiles-operator/leases/security-profiles-operator-webhook-lock": dial tcp 10.152.183.1:443: connect: connection refused
E0102 14:47:42.286575       1 leaderelection.go:330] error retrieving resource lock security-profiles-operator/security-profiles-operator-webhook-lock: Get "https://10.152.183.1:443/apis/coordination.k8s.io/v1/namespaces/security-profiles-operator/leases/security-profiles-operator-webhook-lock": dial tcp 10.152.183.1:443: connect: connection refused
E0102 14:47:44.525827       1 leaderelection.go:330] error retrieving resource lock security-profiles-operator/security-profiles-operator-webhook-lock: Get "https://10.152.183.1:443/apis/coordination.k8s.io/v1/namespaces/security-profiles-operator/leases/security-profiles-operator-webhook-lock": dial tcp 10.152.183.1:443: connect: connection refused
E0102 14:47:48.264021       1 leaderelection.go:330] error retrieving resource lock security-profiles-operator/security-profiles-operator-webhook-lock: Get "https://10.152.183.1:443/apis/coordination.k8s.io/v1/namespaces/security-profiles-operator/leases/security-profiles-operator-webhook-lock": dial tcp 10.152.183.1:443: connect: connection refused
E0102 14:47:52.096979       1 leaderelection.go:330] error retrieving resource lock security-profiles-operator/security-profiles-operator-webhook-lock: Get "https://10.152.183.1:443/apis/coordination.k8s.io/v1/namespaces/security-profiles-operator/leases/security-profiles-operator-webhook-lock": dial tcp 10.152.183.1:443: connect: connection refused
E0102 14:47:54.459939       1 leaderelection.go:330] error retrieving resource lock security-profiles-operator/security-profiles-operator-webhook-lock: Get "https://10.152.183.1:443/apis/coordination.k8s.io/v1/namespaces/security-profiles-operator/leases/security-profiles-operator-webhook-lock": dial tcp 10.152.183.1:443: connect: connection refused
E0102 14:48:18.213932       1 leaderelection.go:367] Failed to update lock: Operation cannot be fulfilled on leases.coordination.k8s.io "security-profiles-operator-webhook-lock": the object has been modified; please apply your changes to the latest version and try again
I0102 14:48:14.864029       1 main.go:269]  "msg"="Set logging verbosity to 0" 
I0102 14:48:14.864118       1 main.go:275]  "msg"="Profiling support enabled: false" 
I0102 14:48:14.864157       1 main.go:295] setup "msg"="starting component: security-profiles-operator" "buildDate"="1980-01-01T00:00:00Z" "buildTags"="netgo,osusergo,seccomp,apparmor" "cgoldFlags"="unknown" "compiler"="gc" "dependencies"="github.com/acobaugh/osrelease v0.1.0 ,github.com/aquasecurity/libbpfgo v0.4.4-libbpf-1.0.1 ,github.com/beorn7/perks v1.0.1 ,github.com/blang/semver/v4 v4.0.0 ,github.com/cert-manager/cert-manager v1.10.1 ,github.com/cespare/xxhash/v2 v2.1.2 ,github.com/containers/common v0.50.1 ,github.com/cpuguy83/go-md2man/v2 v2.0.2 ,github.com/davecgh/go-spew v1.1.1 ,github.com/emicklei/go-restful/v3 v3.8.0 ,github.com/evanphx/json-patch/v5 v5.6.0 ,github.com/fsnotify/fsnotify v1.5.4 ,github.com/go-logr/logr v1.2.3 ,github.com/go-openapi/jsonpointer v0.19.5 ,github.com/go-openapi/jsonreference v0.20.0 ,github.com/go-openapi/swag v0.22.3 ,github.com/gogo/protobuf v1.3.2 ,github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da ,github.com/golang/protobuf v1.5.2 ,github.com/google/gnostic v0.6.9 ,github.com/google/go-cmp v0.5.9 ,github.com/google/gofuzz v1.2.0 ,github.com/google/uuid v1.3.0 ,github.com/imdario/mergo v0.3.13 ,github.com/jellydator/ttlcache/v3 v3.0.0 ,github.com/josharian/intern v1.0.0 ,github.com/json-iterator/go v1.1.12 ,github.com/mailru/easyjson v0.7.7 ,github.com/matttproud/golang_protobuf_extensions v1.0.2-0.20181231171920-c182affec369 ,github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd ,github.com/modern-go/reflect2 v1.0.2 ,github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 ,github.com/nxadm/tail v1.4.8 ,github.com/opencontainers/runtime-spec v1.0.3-0.20210326190908-1c3f411f0417 ,github.com/openshift/api v0.0.0-20221205111557-f2fbb1d1cd5e ,github.com/pjbgf/go-apparmor v0.1.1 ,github.com/pkg/errors v0.9.1 ,github.com/prometheus-operator/prometheus-operator/pkg/apis/monitoring v0.61.1 ,github.com/prometheus/client_golang v1.14.0 ,github.com/prometheus/client_model v0.3.0 ,github.com/prometheus/common v0.37.0 ,github.com/prometheus/procfs v0.8.0 ,github.com/russross/blackfriday/v2 v2.1.0 ,github.com/seccomp/libseccomp-golang v0.10.0 ,github.com/sirupsen/logrus v1.9.0 ,github.com/spf13/pflag v1.0.5 ,github.com/urfave/cli/v2 v2.23.6 ,github.com/xrash/smetrics v0.0.0-20201216005158-039620a65673 ,golang.org/x/mod v0.7.0 ,golang.org/x/net v0.4.0 ,golang.org/x/oauth2 v0.0.0-20220909003341-f21342109be1 ,golang.org/x/sync v0.1.0 ,golang.org/x/sys v0.3.0 ,golang.org/x/term v0.3.0 ,golang.org/x/text v0.5.0 ,golang.org/x/time v0.0.0-20220722155302-e5dcc9cfc0b9 ,gomodules.xyz/jsonpatch/v2 v2.2.0 ,google.golang.org/genproto v0.0.0-20220805133916-01dd62135a58 ,google.golang.org/grpc v1.51.0 ,google.golang.org/protobuf v1.28.1 ,gopkg.in/inf.v0 v0.9.1 ,gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 ,gopkg.in/yaml.v2 v2.4.0 ,gopkg.in/yaml.v3 v3.0.1 ,k8s.io/api v0.25.4 ,k8s.io/apiextensions-apiserver v0.25.4 ,k8s.io/apimachinery v0.25.5 ,k8s.io/client-go v0.25.4 ,k8s.io/component-base v0.25.4 ,k8s.io/klog/v2 v2.80.1 ,k8s.io/kube-openapi v0.0.0-20220803164354-a70c9af30aea ,k8s.io/utils v0.0.0-20221108210102-8e77b1f39fe2 ,sigs.k8s.io/controller-runtime v0.13.1 ,sigs.k8s.io/gateway-api v0.5.0 ,sigs.k8s.io/json v0.0.0-20220713155537-f223a00ba0e2 ,sigs.k8s.io/release-utils v0.7.3 ,sigs.k8s.io/structured-merge-diff/v4 v4.2.3 ,sigs.k8s.io/yaml v1.3.0 " "gitCommit"="unknown" "gitCommitDate"="unknown" "gitTreeState"="clean" "goVersion"="go1.19.3" "ldFlags"="unknown" "libbpf"="v1.0" "libseccomp"="2.5.4" "platform"="linux/amd64" 
"version"="0.6.1-dev"
I0102 14:48:14.864634       1 main.go:374] setup "msg"="watching all namespaces" 
I0102 14:48:15.160835       1 listener.go:44] controller-runtime/metrics "msg"="Metrics server is starting to listen" "addr"=":8080"
I0102 14:48:15.730692       1 main.go:357] setup "msg"="starting manager" 
I0102 14:48:15.737717       1 internal.go:366]  "msg"="Starting server" "addr"={"IP":"::","Port":8080,"Zone":""} "kind"="metrics" "path"="/metrics"
I0102 14:48:15.856208       1 leaderelection.go:248] attempting to acquire leader lease security-profiles-operator/security-profiles-operator-lock...
I0102 14:48:21.187277       1 main.go:541] setup "msg"="registering webhooks" 
I0102 14:48:21.207558       1 server.go:148] controller-runtime/webhook "msg"="Registering webhook" "path"="/mutate-v1-pod-binding"
I0102 14:48:21.207626       1 server.go:148] controller-runtime/webhook "msg"="Registering webhook" "path"="/mutate-v1-pod-recording"
I0102 14:48:21.220343       1 main.go:547] setup "msg"="starting webhook" 
I0102 14:48:21.222301       1 server.go:216] controller-runtime/webhook/webhooks "msg"="Starting webhook server" 
I0102 14:48:21.222453       1 certwatcher.go:131] controller-runtime/certwatcher "msg"="Updated current TLS certificate" 
I0102 14:48:21.222509       1 server.go:270] controller-runtime/webhook "msg"="Serving webhook server" "host"="" "port"=9443
I0102 14:48:21.225419       1 internal.go:366]  "msg"="Starting server" "addr"={"IP":"::","Port":8080,"Zone":""} "kind"="metrics" "path"="/metrics"
I0102 14:48:21.229684       1 leaderelection.go:248] attempting to acquire leader lease security-profiles-operator/security-profiles-operator-webhook-lock...
I0102 14:48:21.233792       1 certwatcher.go:85] controller-runtime/certwatcher "msg"="Starting certificate watcher" 
Error from server (BadRequest): container "security-profiles-operator" in pod "spod-lxpkc" is waiting to start: PodInitializing
[ 17:19:21 ] ❯ kubectl get events -n security-profiles-operator
LAST SEEN   TYPE      REASON    OBJECT           MESSAGE
16s         Warning   BackOff   pod/spod-lxpkc   Back-off restarting failed container non-root-enabler in pod spod-lxpkc_security-profiles-operator(d77aec90-4f4e-408b-b010-5209aa80b244)
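
If the spod pod stays in PodInitializing, the non-root-enabler init container is the one crash-looping, so its logs and the pod events are the next thing to check (the pod name will differ per run):

kubectl -n security-profiles-operator logs pod/spod-lxpkc -c non-root-enabler
kubectl -n security-profiles-operator describe pod spod-lxpkc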