[2.8.0] Operator panics if proxy section is not defined #1367

@Kajot-dev

Description

Report

When the operator starts, it immediately panics with:

2025-11-20T11:25:19.697Z	ERROR	controller-runtime.cache	Observed a panic	{"panic": "runtime error: invalid memory address or nil pointer dereference", "panicGoValue": "\"invalid memory address or nil pointer dereference\"", "stacktrace": "goroutine 111 [running]:\nk8s.io/apimachinery/pkg/util/runtime.logPanic({0x290e9f8, 0xc00108ddd0}, {0x2106440, 0x3cd06d0})\n\t/go/pkg/mod/k8s.io/[email protected]/pkg/util/runtime/runtime.go:132 +0xbc\nk8s.io/apimachinery/pkg/util/runtime.handleCrash({0x290e9f8, 0xc000d04060}, {0x2106440, 0x3cd06d0}, {0x0, 0x0, 0x456200?})\n\t/go/pkg/mod/k8s.io/[email protected]/pkg/util/runtime/runtime.go:107 +0x116\nk8s.io/apimachinery/pkg/util/runtime.HandleCrashWithContext({0x290e9f8, 0xc000d04060}, {0x0, 0x0, 0x0})\n\t/go/pkg/mod/k8s.io/[email protected]/pkg/util/runtime/runtime.go:78 +0x5a\npanic({0x2106440?, 0x3cd06d0?})\n\t/usr/local/go/src/runtime/panic.go:783 +0x132\nk8s.io/apimachinery/pkg/util/runtime.handleCrash({0x290e9f8, 0xc000d04060}, {0x2106440, 0x3cd06d0}, {0x0, 0x0, 0xc000cb93f0?})\n\t/go/pkg/mod/k8s.io/[email protected]/pkg/util/runtime/runtime.go:114 +0x1a9\nk8s.io/apimachinery/pkg/util/runtime.HandleCrashWithContext({0x290e9f8, 0xc000d04060}, {0x0, 0x0, 0x0})\n\t/go/pkg/mod/k8s.io/[email protected]/pkg/util/runtime/runtime.go:78 +0x5a\npanic({0x2106440?, 0x3cd06d0?})\n\t/usr/local/go/src/runtime/panic.go:783 +0x132\ngithub.com/percona/percona-postgresql-operator/v2/pkg/apis/pgv2.percona.com/v2.(*PerconaPGCluster).EnvFromSecrets(0xc0007a7688)\n\t/go/src/github.com/percona/percona-postgresql-operator/pkg/apis/pgv2.percona.com/v2/perconapgcluster_types.go:1114 +0x66\ngithub.com/percona/percona-postgresql-operator/v2/pkg/apis/pgv2.percona.com/v2.init.func2({0x292dda0?, 0xc0007a7688?})\n\t/go/src/github.com/percona/percona-postgresql-operator/pkg/apis/pgv2.percona.com/v2/perconapgcluster_types.go:1141 +0x2d\nsigs.k8s.io/controller-runtime/pkg/cache.indexByField.func1({0x2404aa0?, 0xc0007a7688})\n\t/go/pkg/mod/sigs.k8s.io/[email protected]/pkg/cache/informer_cache.go:236 +0xa7\nk8s.io/client-go/tools/cache.(*storeIndex).updateSingleIndex(0xc0001123a0, {0xc000b89f20, 0x1e}, {0x0, 0x0}, {0x2404aa0, 0xc0007a7688}, {0xc000e00c00, 0x22})\n\t/go/pkg/mod/k8s.io/[email protected]/tools/cache/thread_safe_store.go:162 +0xf4\nk8s.io/client-go/tools/cache.(*storeIndex).updateIndices(...)\n\t/go/pkg/mod/k8s.io/[email protected]/tools/cache/thread_safe_store.go:196\nk8s.io/client-go/tools/cache.(*threadSafeMap).Update(0xc0001b3ec0, {0xc000e00c00, 0x22}, {0x2404aa0, 0xc0007a7688})\n\t/go/pkg/mod/k8s.io/[email protected]/tools/cache/thread_safe_store.go:241 +0x1a5\nk8s.io/client-go/tools/cache.(*threadSafeMap).Add(0x2404aa0?, {0xc000e00c00?, 0xc0001b3ec0?}, {0x2404aa0?, 0xc0007a7688?})\n\t/go/pkg/mod/k8s.io/[email protected]/tools/cache/thread_safe_store.go:233 +0x25\nk8s.io/client-go/tools/cache.(*cache).Add(0xc000960260, {0x2404aa0, 0xc0007a7688})\n\t/go/pkg/mod/k8s.io/[email protected]/tools/cache/store.go:182 +0xf1\nk8s.io/client-go/tools/cache.processDeltas({0x290c250, 0xc000233760}, {0x291b420, 0xc000960260}, {0xc000960500?, 0x53521e?, 0xc000072008?}, 0x1)\n\t/go/pkg/mod/k8s.io/[email protected]/tools/cache/controller.go:573 +0x1b2\nk8s.io/client-go/tools/cache.(*sharedIndexInformer).HandleDeltas(0xc000233760, {0x2131c00?, 0xc000628240?}, 0x1)\n\t/go/pkg/mod/k8s.io/[email protected]/tools/cache/shared_informer.go:729 +0x19e\nk8s.io/client-go/tools/cache.(*RealFIFO).Pop(0xc000754000, 0xc0004b8020)\n\t/go/pkg/mod/k8s.io/[email protected]/tools/cache/the_real_fifo.go:234 +0x487\nk8s.io/client-go/tools/cache.(*controller).processLoop(0xc000d060b0, {0x290e9f8, 0xc000d04060})\n\t/go/pkg/mod/k8s.io/[email protected]/tools/cache/controller.go:211 +0x5f\nk8s.io/apimachinery/pkg/util/wait.BackoffUntilWithContext.func1({0x290e9f8?, 0xc000d04060?}, 0xc000d04090?)\n\t/go/pkg/mod/k8s.io/[email protected]/pkg/util/wait/backoff.go:255 +0x51\nk8s.io/apimachinery/pkg/util/wait.BackoffUntilWithContext({0x290e9f8, 0xc000d04060}, 0xc000cb9d68, {0x28eab20, 0xc000d04090}, 0x1)\n\t/go/pkg/mod/k8s.io/[email protected]/pkg/util/wait/backoff.go:256 +0xe5\nk8s.io/apimachinery/pkg/util/wait.JitterUntilWithContext({0x290e9f8, 0xc000d04060}, 0xc000d20d68, 0x3b9aca00, 0x0, 0x1)\n\t/go/pkg/mod/k8s.io/[email protected]/pkg/util/wait/backoff.go:223 +0x8f\nk8s.io/apimachinery/pkg/util/wait.UntilWithContext(...)\n\t/go/pkg/mod/k8s.io/[email protected]/pkg/util/wait/backoff.go:172\nk8s.io/client-go/tools/cache.(*controller).RunWithContext(0xc000d060b0, {0x290e9f8, 0xc000d04060})\n\t/go/pkg/mod/k8s.io/[email protected]/tools/cache/controller.go:183 +0x418\nk8s.io/client-go/tools/cache.(*sharedIndexInformer).RunWithContext(0xc000233760, {0x290e9f8, 0xc000d04060})\n\t/go/pkg/mod/k8s.io/[email protected]/tools/cache/shared_informer.go:587 +0x39a\nsigs.k8s.io/controller-runtime/pkg/cache/internal.(*Cache).Start(0xc0004fa150, 0x0?)\n\t/go/pkg/mod/sigs.k8s.io/[email protected]/pkg/cache/internal/informers.go:114 +0xe3\nsigs.k8s.io/controller-runtime/pkg/cache/internal.(*Informers).startInformerLocked.func1()\n\t/go/pkg/mod/sigs.k8s.io/[email protected]/pkg/cache/internal/informers.go:248 +0x75\ncreated by sigs.k8s.io/controller-runtime/pkg/cache/internal.(*Informers).startInformerLocked in goroutine 109\n\t/go/pkg/mod/sigs.k8s.io/[email protected]/pkg/cache/internal/informers.go:246 +0x87\n"}
k8s.io/apimachinery/pkg/util/runtime.handleCrash
	/go/pkg/mod/k8s.io/[email protected]/pkg/util/runtime/runtime.go:107
k8s.io/apimachinery/pkg/util/runtime.HandleCrashWithContext
	/go/pkg/mod/k8s.io/[email protected]/pkg/util/runtime/runtime.go:78
runtime.gopanic
	/usr/local/go/src/runtime/panic.go:783
k8s.io/apimachinery/pkg/util/runtime.handleCrash
	/go/pkg/mod/k8s.io/[email protected]/pkg/util/runtime/runtime.go:114
k8s.io/apimachinery/pkg/util/runtime.HandleCrashWithContext
	/go/pkg/mod/k8s.io/[email protected]/pkg/util/runtime/runtime.go:78
runtime.gopanic
	/usr/local/go/src/runtime/panic.go:783
runtime.panicmem
	/usr/local/go/src/runtime/panic.go:262
runtime.sigpanic
	/usr/local/go/src/runtime/signal_unix.go:925
github.com/percona/percona-postgresql-operator/v2/pkg/apis/pgv2.percona.com/v2.(*PerconaPGCluster).EnvFromSecrets
	/go/src/github.com/percona/percona-postgresql-operator/pkg/apis/pgv2.percona.com/v2/perconapgcluster_types.go:1114
github.com/percona/percona-postgresql-operator/v2/pkg/apis/pgv2.percona.com/v2.init.func2
	/go/src/github.com/percona/percona-postgresql-operator/pkg/apis/pgv2.percona.com/v2/perconapgcluster_types.go:1141
sigs.k8s.io/controller-runtime/pkg/cache.indexByField.func1
	/go/pkg/mod/sigs.k8s.io/[email protected]/pkg/cache/informer_cache.go:236
k8s.io/client-go/tools/cache.(*storeIndex).updateSingleIndex
	/go/pkg/mod/k8s.io/[email protected]/tools/cache/thread_safe_store.go:162
k8s.io/client-go/tools/cache.(*storeIndex).updateIndices
	/go/pkg/mod/k8s.io/[email protected]/tools/cache/thread_safe_store.go:196
k8s.io/client-go/tools/cache.(*threadSafeMap).Update
	/go/pkg/mod/k8s.io/[email protected]/tools/cache/thread_safe_store.go:241
k8s.io/client-go/tools/cache.(*threadSafeMap).Add
	/go/pkg/mod/k8s.io/[email protected]/tools/cache/thread_safe_store.go:233
k8s.io/client-go/tools/cache.(*cache).Add
	/go/pkg/mod/k8s.io/[email protected]/tools/cache/store.go:182
k8s.io/client-go/tools/cache.processDeltas
	/go/pkg/mod/k8s.io/[email protected]/tools/cache/controller.go:573
k8s.io/client-go/tools/cache.(*sharedIndexInformer).HandleDeltas
	/go/pkg/mod/k8s.io/[email protected]/tools/cache/shared_informer.go:729
k8s.io/client-go/tools/cache.(*RealFIFO).Pop
	/go/pkg/mod/k8s.io/[email protected]/tools/cache/the_real_fifo.go:234
k8s.io/client-go/tools/cache.(*controller).processLoop
	/go/pkg/mod/k8s.io/[email protected]/tools/cache/controller.go:211
k8s.io/apimachinery/pkg/util/wait.BackoffUntilWithContext.func1
	/go/pkg/mod/k8s.io/[email protected]/pkg/util/wait/backoff.go:255
k8s.io/apimachinery/pkg/util/wait.BackoffUntilWithContext
	/go/pkg/mod/k8s.io/[email protected]/pkg/util/wait/backoff.go:256
k8s.io/apimachinery/pkg/util/wait.JitterUntilWithContext
	/go/pkg/mod/k8s.io/[email protected]/pkg/util/wait/backoff.go:223
k8s.io/apimachinery/pkg/util/wait.UntilWithContext
	/go/pkg/mod/k8s.io/[email protected]/pkg/util/wait/backoff.go:172
k8s.io/client-go/tools/cache.(*controller).RunWithContext
	/go/pkg/mod/k8s.io/[email protected]/tools/cache/controller.go:183
k8s.io/client-go/tools/cache.(*sharedIndexInformer).RunWithContext
	/go/pkg/mod/k8s.io/[email protected]/tools/cache/shared_informer.go:587
sigs.k8s.io/controller-runtime/pkg/cache/internal.(*Cache).Start
	/go/pkg/mod/sigs.k8s.io/[email protected]/pkg/cache/internal/informers.go:114
sigs.k8s.io/controller-runtime/pkg/cache/internal.(*Informers).startInformerLocked.func1
	/go/pkg/mod/sigs.k8s.io/[email protected]/pkg/cache/internal/informers.go:248

More about the problem

I have only one PerconaPGCluster resource:

apiVersion: pgv2.percona.com/v2
kind: PerconaPGCluster
metadata:
  creationTimestamp: "2025-04-17T14:06:01Z"
  finalizers:
  - internal.percona.com/stop-watchers
  generation: 214
  name: harbor-postgresql
  namespace: postgresql-test
  resourceVersion: "2061519649"
  uid: 88b68ef3-5f54-4bc2-8763-3d84e289e448
spec:
  autoCreateUserSchema: false
  backups:
    pgbackrest:
      global:
        repo1-retention-archive: "3"
        repo1-retention-archive-type: full
        repo1-retention-diff: "3"
        repo1-retention-full: "3"
        repo1-retention-full-type: count
      image: image-registry.openshift-image-registry.svc:5000/postgresql-test/percona-harbor-pgbackrest:2.55.0
      manual:
        options:
        - --type=incr
        - --annotation="percona.com/backup-name"="harbor-postgresql-repo1-incr-29m25"
        repoName: repo1
      metadata:
        annotations:
          backup.velero.io/backup-volumes: repo1
      repoHost:
        resources:
          limits:
            cpu: 400m
            memory: 400Mi
          requests:
            cpu: 200m
            memory: 200Mi
      repos:
      - name: repo1
        schedules:
          differential: 0 0 * * 3
          full: 0 0 * * 0
          incremental: 0 0 * * 1-2,4-6
        volume:
          volumeClaimSpec:
            accessModes:
            - ReadWriteOncePod
            resources:
              requests:
                storage: 10Gi
            storageClassName: standard
      restore:
        enabled: false
        repoName: repo1
        resources: {}
  crVersion: 2.7.0
  databaseInitSQL:
    key: init.sql
    name: harbor-init-sql
  expose:
    type: ClusterIP
  extensions:
    builtin: {}
    storage: {}
  image: image-registry.openshift-image-registry.svc:5000/postgresql-test/percona-harbor-postgres:2.7.0-ppg15.13-postgres
  initContainer:
    image: image-registry.openshift-image-registry.svc:5000/postgresql-test/percona-harbor-postgres-operator:2.7.0
  instances:
  - affinity:
      podAntiAffinity:
        preferredDuringSchedulingIgnoredDuringExecution:
        - podAffinityTerm:
            labelSelector:
              matchExpressions:
              - key: postgres-operator.crunchydata.com/cluster
                operator: In
                values:
                - harbor-postgresql
              - key: postgres-operator.crunchydata.com/instance
                operator: Exists
            topologyKey: kubernetes.io/hostname
          weight: 1
    dataVolumeClaimSpec:
      accessModes:
      - ReadWriteOncePod
      resources:
        requests:
          storage: 5Gi
      storageClassName: gold
    name: gold
    replicas: 1
    resources:
      limits:
        cpu: "1"
        memory: 2500Mi
      requests:
        cpu: 500m
        memory: 2Gi
  openshift: true
  patroni:
    leaderLeaseDurationSeconds: 30
    port: 8008
    syncPeriodSeconds: 10
  pause: false
  port: 5432
  postgresVersion: 15
  secrets: {}
  standby:
    enabled: false
  users:
  - databases:
    - registry
    grantPublicSchemaAccess: true
    name: harbor
status:
  conditions:
  - lastTransitionTime: "2025-09-30T12:40:40Z"
    message: ""
    reason: AllConditionsAreTrue
    status: "True"
    type: ReadyForBackup
  host: harbor-postgresql-pgbouncer.postgresql-test.svc
  installedCustomExtensions: []
  patroniVersion: 4.0.5
  pgbouncer:
    ready: 1
    size: 1
  postgres:
    imageID: image-registry.openshift-image-registry.svc:5000/postgresql-test/percona-harbor-postgres@sha256:1d1c01cc53392a765459a6d912de1920e8802c2fe44d96aadf3aaa1399138481
    instances:
    - name: gold
      ready: 1
      size: 1
    ready: 1
    size: 1
    version: 15
  state: ready
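
Note that the spec above contains no proxy section at all, which matches the title: judging from the stack trace, EnvFromSecrets appears to dereference something under spec.proxy while the cache index is being built, and panics when that section is absent. Until a fix is released, explicitly declaring a minimal proxy section may work around the crash. The snippet below is an untested sketch that assumes the documented spec.proxy.pgBouncer layout of the PerconaPGCluster CRD; the status above already reports a pgbouncer deployment of size 1, so setting replicas: 1 should only make the existing default explicit.

spec:
  proxy:            # assumed workaround (untested): declare the section so spec.proxy is not nil
    pgBouncer:
      replicas: 1   # matches the pgbouncer size already reported in status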

Steps to reproduce

  1. Start the operator
  2. Operator panics

Versions

  1. Kubernetes (OKD/OpenShift):
     Client Version: 4.20.0
     Kustomize Version: v5.6.0
     Server Version: 4.19.0-okd-scos.19
     Kubernetes Version: v1.32.8-dirty
  2. Operator: 2.8.0
  3. Database: 15.3

Anything else?

No response

Metadata

Labels

2.9.0, bug (Something isn't working)
